#50532 Issue 49761 - Fix CI test suite issues
Closed 3 years ago by spichugi. Opened 4 years ago by bsmejkal.
bsmejkal/389-ds-base import_duplicate_dn  into  master

@@ -11,6 +11,7 @@ 

  from lib389.utils import time, ldap, os, logging

  from lib389.topologies import topology_st as topo

  from lib389.dbgen import dbgen

+ from lib389._constants import DEFAULT_SUFFIX

  

  pytestmark = pytest.mark.tier1

  
@@ -149,6 +150,66 @@ 

      assert not topo.standalone.detectDisorderlyShutdown()

  

  

+ @pytest.mark.bz1406101

+ @pytest.mark.ds49071

+ def test_import_duplicate_dn(topo):

+     """Import ldif with duplicate DNs, should not log error "unable to flush"

+ 

+     :id: dce2b898-119d-42b8-a236-1130f58bff17

+     :setup: Standalone instance, ldif file with duplicate entries

+     :steps:

+         1. Create a ldif file with duplicate entries

+         2. Import ldif file to DS

+         3. Check error log file, it should not log "unable to flush"

+         4. Check error log file, it should log "Duplicated DN detected"

+     :expectedresults:

+         1. Success

+         2. Success

+         3. Success

+         4. Success

+     """

+ 

+     standalone = topo.standalone

+ 

+     log.info('Delete the previous error logs')

+     standalone.deleteErrorLogs()

+ 

+     log.info('Create import file')

+     l = """dn: dc=example,dc=com

+ objectclass: top

+ objectclass: domain

+ dc: example

+ 

+ dn: ou=myDups00001,dc=example,dc=com

+ objectclass: top

+ objectclass: organizationalUnit

+ ou: myDups00001

+ 

+ dn: ou=myDups00001,dc=example,dc=com

+ objectclass: top

+ objectclass: organizationalUnit

+ ou: myDups00001

+ """

+ 

+     ldif_dir = standalone.get_ldif_dir()

+     ldif_file = os.path.join(ldif_dir, 'data.ldif')

+     with open(ldif_file, "w") as fd:

+         fd.write(l)

+         fd.close()

+ 

+     log.info('Import ldif with duplicate entry')

+     assert standalone.tasks.importLDIF(suffix=DEFAULT_SUFFIX, input_file=ldif_file, args={TASK_WAIT: True})

+ 

+     log.info('Restart the server to flush the logs')

+     standalone.restart()

+ 

+     log.info('Error log should not have "unable to flush" message')

+     assert not standalone.ds_error_log.match('.*unable to flush.*')

+ 

+     log.info('Error log should have "Duplicated DN detected" message')

+     assert standalone.ds_error_log.match('.*Duplicated DN detected.*')

+ 

+ 

  if __name__ == '__main__':

      # Run isolated

      # -s for DEBUG mode

@@ -1,74 +0,0 @@ 

- import pytest

- from lib389.utils import *

- from lib389.topologies import topology_st as topo

- 

- from lib389._constants import DEFAULT_SUFFIX, TASK_WAIT

- 

- pytestmark = pytest.mark.tier2

- 

- DEBUGGING = os.getenv("DEBUGGING", default=False)

- if DEBUGGING:

-     logging.getLogger(__name__).setLevel(logging.DEBUG)

- else:

-     logging.getLogger(__name__).setLevel(logging.INFO)

- log = logging.getLogger(__name__)

- 

- 

- def test_ticket49071(topo):

-     """Verify- Import ldif with duplicate DNs, should not log error "unable to flush"

- 

-     :id: dce2b898-119d-42b8-a236-1130f58bff17

-     :feature: It is to verify bug:1406101, ticket:49071

-     :setup: Standalone instance, ldif file with duplicate entries

-     :steps: 1. Create a ldif file with duplicate entries

-             2. Import ldif file to DS

-             3. Check error log file, it should not log "unable to flush"

-             4. Check error log file, it should log "Duplicated DN detected"

-     :expectedresults: Error log should not contain "unable to flush" error

-     """

- 

-     log.info('ticket 49071: Create import file')

-     l = """dn: dc=example,dc=com

- objectclass: top

- objectclass: domain

- dc: example

- 

- dn: ou=myDups00001,dc=example,dc=com

- objectclass: top

- objectclass: organizationalUnit

- ou: myDups00001

- 

- dn: ou=myDups00001,dc=example,dc=com

- objectclass: top

- objectclass: organizationalUnit

- ou: myDups00001

- """

- 

-     ldif_dir = topo.standalone.get_ldif_dir()

-     ldif_file = os.path.join(ldif_dir, 'data.ldif')

-     with open(ldif_file, "w") as fd:

-         fd.write(l)

-         fd.close()

- 

-     log.info('ticket 49071: Import ldif having duplicate entry')

-     try:

-         topo.standalone.tasks.importLDIF(suffix=DEFAULT_SUFFIX,

-                                          input_file=ldif_file,

-                                          args={TASK_WAIT: True})

-     except ValueError:

-         log.fatal('ticket 49104: Online import failed')

-         raise

- 

-     log.info('ticket 49071: Error log should not have - unable to flush')

-     assert not topo.standalone.ds_error_log.match('.*unable to flush.*')

- 

-     log.info('ticket 49071: Error log should have - Duplicated DN detected')

-     assert topo.standalone.ds_error_log.match('.*Duplicated DN detected.*')

- 

- 

- if __name__ == '__main__':

-     # Run isolated

-     # -s for DEBUG mode

-     CURRENT_FILE = os.path.realpath(__file__)

-     pytest.main("-s %s" % CURRENT_FILE)

- 

Description:
Fixing the failing ticket49071_test.py so that it passes the nightly tests.
Also moved this test to "/suites/import/regression_test.py" and removed ticket49071_test.py from "tickets".

Relates to:
https://pagure.io/389-ds-base/issue/49071
https://pagure.io/389-ds-base/issue/49761

Reviewed by: ???

Can you rebase this please? Otherwise it looks good to me :)

rebased onto 5a2c1fc395ac3a45d49634dbbd3833a916246a7e

4 years ago

rebased onto f5714c1

4 years ago

Pull-Request has been merged by vashirov

4 years ago

389-ds-base is moving from Pagure to GitHub. This means that new issues and pull requests
will be accepted only in 389-ds-base's GitHub repository.

This pull request has been cloned to GitHub as an issue and is available here:
- https://github.com/389ds/389-ds-base/issues/3588

If you want to continue working on the PR, please navigate to the GitHub issue,
download the patch from the attachments, and file a new pull request.

Thank you for your understanding. We apologize for any inconvenience.

Pull-Request has been closed by spichugi

3 years ago