#417 Format code with black
Merged 3 years ago by lsedlar. Opened 3 years ago by hlin.
hlin/odcs black into master

file modified
+6
@@ -232,6 +232,12 @@ 

  

  ## Development

  

+ ### Code Convention

+ 

+ The code must be formatted with ``black`` and pass ``flake8`` checks.

+ 

+ Run ``tox -e black,flake8`` to run both checks.

+ 

  ### Unit-testing

  

  Install packages required by pip to compile some python packages:

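The ``tox -e black,flake8`` check added above relies on the repository's tox.ini defining matching environments. Those definitions are not part of this diff, so the snippet below is only a sketch of what such environments commonly look like:

    [testenv:black]
    deps = black
    commands = black --check --diff .

    [testenv:flake8]
    deps = flake8
    commands = flake8 .
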
file modified
+200 -150
@@ -12,31 +12,23 @@ 

  import json

  

  env_config = {

-     'fedora': {

-         'prod': {

-             'server_url': 'https://odcs.fedoraproject.org',

-         },

-         'staging': {

-             'server_url': 'https://odcs.stg.fedoraproject.org',

-         }

+     "fedora": {

+         "prod": {"server_url": "https://odcs.fedoraproject.org"},

+         "staging": {"server_url": "https://odcs.stg.fedoraproject.org"},

+     },

+     "redhat": {

+         "prod": {"server_url": "https://odcs.engineering.redhat.com"},

+         "staging": {"server_url": "https://odcs.stage.engineering.redhat.com"},

      },

-     'redhat': {

-         'prod': {

-             'server_url': 'https://odcs.engineering.redhat.com',

-         },

-         'staging': {

-             'server_url': 'https://odcs.stage.engineering.redhat.com',

-         }

-     }

  }

  

  id_provider_config = {

-     'prod': 'https://id.fedoraproject.org/openidc/',

-     'staging': 'https://id.stg.fedoraproject.org/openidc/',

+     "prod": "https://id.fedoraproject.org/openidc/",

+     "staging": "https://id.stg.fedoraproject.org/openidc/",

  }

  

  parser = argparse.ArgumentParser(

-     description='''\

+     description="""\

  %(prog)s - Command line client.

  

  If you have problems authenticating with OpenID Connect, try:
@@ -45,36 +37,45 @@ 

  

  Example usage:

  

- ''',

+ """,

      formatter_class=argparse.RawDescriptionHelpFormatter,

  )

  parser.add_argument(

-     '--redhat', action='store_const',

-     const='redhat', default='fedora', dest='infra',

-     help='Use internal ODCS infra environment. If omitted, Fedora Infra will '

-          'be used by default.')

- parser.add_argument(

-     '--staging', action='store_const',

-     const='staging', default='prod', dest='env',

-     help='Use Fedora Infra or internal staging environment, which depends on '

-          'if --redhat is specified. If omitted, production environment will '

-          'be used.')

- parser.add_argument(

-     '--server', default=None, help="Use custom ODCS server.")

+     "--redhat",

+     action="store_const",

+     const="redhat",

+     default="fedora",

+     dest="infra",

+     help="Use internal ODCS infra environment. If omitted, Fedora Infra will "

+     "be used by default.",

+ )

  parser.add_argument(

-     '--token', default=None, help="OpenIDC token to use or path to token file")

+     "--staging",

+     action="store_const",

+     const="staging",

+     default="prod",

+     dest="env",

+     help="Use Fedora Infra or internal staging environment, which depends on "

+     "if --redhat is specified. If omitted, production environment will "

+     "be used.",

+ )

+ parser.add_argument("--server", default=None, help="Use custom ODCS server.")

  parser.add_argument(

-     '--no-wait', action='store_true',

-     help='When used, odcs client will not wait for the action to finish.')

+     "--token", default=None, help="OpenIDC token to use or path to token file"

+ )

  parser.add_argument(

-     '-q', '--quiet', action='store_true',

-     help='Run without detailed log messages')

+     "--no-wait",

+     action="store_true",

+     help="When used, odcs client will not wait for the action to finish.",

+ )

  parser.add_argument(

-     '--watch', action='store_true',

-     help="Watch compose logs")

+     "-q", "--quiet", action="store_true", help="Run without detailed log messages"

+ )

+ parser.add_argument("--watch", action="store_true", help="Watch compose logs")

  

  subparsers = parser.add_subparsers(

-     description='These commands you can use to operate composes with ODCS')

+     description="These commands you can use to operate composes with ODCS"

+ )

  

  

  KNOWN_ARGS = {
@@ -90,13 +91,13 @@ 

      "--sigkey": dict(

          default=[],

          action="append",

-         help='ODCS will require that all packages are signed by this ' \

-              'signing key ID. Example: "FD431D51". You may use this option ' \

-              'multiple times to specify multiple key IDs. ODCS will choose ' \

-              'signed packages according to the order of the key IDs that ' \

-              'you specify here. Use "--sigkey none" to allow unsigned ' \

-              'packages. If you do not specify any --sigkey option, ODCS ' \

-              'will use the default signing key list (defined on the server).'

+         help="ODCS will require that all packages are signed by this "

+         'signing key ID. Example: "FD431D51". You may use this option '

+         "multiple times to specify multiple key IDs. ODCS will choose "

+         "signed packages according to the order of the key IDs that "

+         'you specify here. Use "--sigkey none" to allow unsigned '

+         "packages. If you do not specify any --sigkey option, ODCS "

+         "will use the default signing key list (defined on the server).",

      ),

      "--koji-event": dict(default=None, help="Koji event for populating package set"),

      "--arch": dict(
@@ -146,118 +147,157 @@ 

  The create command will be removed and bugs with it are not going to be fixed.

  """

  create_parser = subparsers.add_parser(

-     'create', help='Low-level command to create a new compose (Deprecated)',

-     description=create_command_deprecated)

- create_parser.set_defaults(command='create')

+     "create",

+     help="Low-level command to create a new compose (Deprecated)",

+     description=create_command_deprecated,

+ )

+ create_parser.set_defaults(command="create")

  create_parser.add_argument(

-     'source_type', default=None,

-     choices=['tag', 'module', 'raw_config', 'pulp', 'build'],

-     help="Type for the source, for example: tag.")

+     "source_type",

+     default=None,

+     choices=["tag", "module", "raw_config", "pulp", "build"],

+     help="Type for the source, for example: tag.",

+ )

  create_parser.add_argument(

-     'source', default="",

+     "source",

+     default="",

      help="Source for the compose. May be a koji tag or a "

-     "whitespace separated list of modules.")

+     "whitespace separated list of modules.",

+ )

  create_parser.add_argument(

-     'packages', metavar='package', nargs='*',

-     help='Packages to be included in the compose.')

+     "packages",

+     metavar="package",

+     nargs="*",

+     help="Packages to be included in the compose.",

+ )

  create_parser.add_argument(

-     'builds', metavar='build', nargs='*',

-     help='Builds to be included in the compose.')

- _add_arguments(create_parser, "--result", "--sigkey", "--koji-event",

-                "--arch", "--module-defaults-url", "--module-defaults-commit",

-                "--modular-tag", "--lookaside-repo", "--label", "--compose-type",

-                "--target-dir", "--flag")

+     "builds", metavar="build", nargs="*", help="Builds to be included in the compose."

+ )

+ _add_arguments(

+     create_parser,

+     "--result",

+     "--sigkey",

+     "--koji-event",

+     "--arch",

+     "--module-defaults-url",

+     "--module-defaults-commit",

+     "--modular-tag",

+     "--lookaside-repo",

+     "--label",

+     "--compose-type",

+     "--target-dir",

+     "--flag",

+ )

  

  

  create_tag_parser = subparsers.add_parser(

-     'create-tag', help='Create new compose from Koji tag.')

- create_tag_parser.set_defaults(command='create-tag')

- create_tag_parser.add_argument(

-     'tag', default="",

-     help="Koji tag name.")

+     "create-tag", help="Create new compose from Koji tag."

+ )

+ create_tag_parser.set_defaults(command="create-tag")

+ create_tag_parser.add_argument("tag", default="", help="Koji tag name.")

  create_tag_parser.add_argument(

-     'packages', metavar='package', nargs='*',

-     help='Koji packages to be included in the compose.')

+     "packages",

+     metavar="package",

+     nargs="*",

+     help="Koji packages to be included in the compose.",

+ )

  _add_arguments(

-     create_tag_parser, "--sigkey", "--koji-event", "--arch", "--module-defaults-url",

-     "--module-defaults-commit", "--modular-tag", "--lookaside-repo", "--target-dir",

-     "--build", "--flag")

+     create_tag_parser,

+     "--sigkey",

+     "--koji-event",

+     "--arch",

+     "--module-defaults-url",

+     "--module-defaults-commit",

+     "--modular-tag",

+     "--lookaside-repo",

+     "--target-dir",

+     "--build",

+     "--flag",

+ )

  

  

  create_module_parser = subparsers.add_parser(

-     'create-module', help='Create new compose from modules.')

- create_module_parser.set_defaults(command='create-module')

+     "create-module", help="Create new compose from modules."

+ )

+ create_module_parser.set_defaults(command="create-module")

  create_module_parser.add_argument(

-     'modules', metavar='modules', nargs='+',

-     help='List of modules in N:S, N:S:V or N:S:V:C format.')

+     "modules",

+     metavar="modules",

+     nargs="+",

+     help="List of modules in N:S, N:S:V or N:S:V:C format.",

+ )

  _add_arguments(

-     create_module_parser, "--sigkey", "--arch", "--module-defaults-url",

-     "--module-defaults-commit", "--lookaside-repo", "--target-dir", "--flag")

+     create_module_parser,

+     "--sigkey",

+     "--arch",

+     "--module-defaults-url",

+     "--module-defaults-commit",

+     "--lookaside-repo",

+     "--target-dir",

+     "--flag",

+ )

  

  

  create_pulp_parser = subparsers.add_parser(

-     'create-pulp', help='Create new compose from Pulp content_sets.')

- create_pulp_parser.set_defaults(command='create-pulp')

+     "create-pulp", help="Create new compose from Pulp content_sets."

+ )

+ create_pulp_parser.set_defaults(command="create-pulp")

  create_pulp_parser.add_argument(

-     'content_sets', metavar='content_set', nargs='+',

-     help='Content sets to be included in the compose.')

+     "content_sets",

+     metavar="content_set",

+     nargs="+",

+     help="Content sets to be included in the compose.",

+ )

  _add_arguments(create_pulp_parser, "--target-dir", "--flag")

  

  

  create_raw_config_parser = subparsers.add_parser(

-     'create-raw-config', help='Create new compose from Pungi raw configuration.')

- create_raw_config_parser.set_defaults(command='create-raw-config')

+     "create-raw-config", help="Create new compose from Pungi raw configuration."

+ )

+ create_raw_config_parser.set_defaults(command="create-raw-config")

  create_raw_config_parser.add_argument(

-     'raw_config_name',

-     help='Name of raw_config compose as defined in ODCS Server.')

+     "raw_config_name", help="Name of raw_config compose as defined in ODCS Server."

+ )

  create_raw_config_parser.add_argument(

-     'raw_config_commit',

-     help='Commit or branch name to get raw_config from.')

+     "raw_config_commit", help="Commit or branch name to get raw_config from."

+ )

  _add_arguments(

-     create_raw_config_parser, "--sigkey", "--label", "--compose-type", "--koji-event",

-     "--target-dir")

+     create_raw_config_parser,

+     "--sigkey",

+     "--label",

+     "--compose-type",

+     "--koji-event",

+     "--target-dir",

+ )

  

  

  create_build_parser = subparsers.add_parser(

-     'create-build', help='Create new compose from Koji builds.')

- create_build_parser.set_defaults(command='create-build')

+     "create-build", help="Create new compose from Koji builds."

+ )

+ create_build_parser.set_defaults(command="create-build")

  create_build_parser.add_argument(

-     'builds', metavar='NVR', nargs='+',

-     help='Koji builds NVRs.')

+     "builds", metavar="NVR", nargs="+", help="Koji builds NVRs."

+ )

  _add_arguments(create_build_parser, "--sigkey", "--flag", "--target-dir")

  

  

- wait_parser = subparsers.add_parser(

-     'wait', help='wait for a compose to finish')

- wait_parser.set_defaults(command='wait')

- wait_parser.add_argument(

-     'compose_id', default=None,

-     help="ODCS compose id")

- wait_parser.add_argument(

-     '--watch', action='store_true',

-     help="Watch compose logs")

- 

- 

- delete_parser = subparsers.add_parser(

-     'delete', help='delete compose')

- delete_parser.set_defaults(command='delete')

- delete_parser.add_argument(

-     'compose_id', default=None,

-     help="ODCS compose id")

- 

- renew_parser = subparsers.add_parser(

-     'renew', help='renew compose')

- renew_parser.set_defaults(command='renew')

- renew_parser.add_argument(

-     'compose_id', default=None,

-     help="ODCS compose id")

- 

- get_parser = subparsers.add_parser(

-     'get', help='get compose info')

- get_parser.set_defaults(command='get')

- get_parser.add_argument(

-     'compose_id', default=None,

-     help="ODCS compose id")

+ wait_parser = subparsers.add_parser("wait", help="wait for a compose to finish")

+ wait_parser.set_defaults(command="wait")

+ wait_parser.add_argument("compose_id", default=None, help="ODCS compose id")

+ wait_parser.add_argument("--watch", action="store_true", help="Watch compose logs")

+ 

+ 

+ delete_parser = subparsers.add_parser("delete", help="delete compose")

+ delete_parser.set_defaults(command="delete")

+ delete_parser.add_argument("compose_id", default=None, help="ODCS compose id")

+ 

+ renew_parser = subparsers.add_parser("renew", help="renew compose")

+ renew_parser.set_defaults(command="renew")

+ renew_parser.add_argument("compose_id", default=None, help="ODCS compose id")

+ 

+ get_parser = subparsers.add_parser("get", help="get compose info")

+ get_parser.set_defaults(command="get")

+ get_parser.add_argument("compose_id", default=None, help="ODCS compose id")

  

  args = parser.parse_args()

  
@@ -266,11 +306,11 @@ 

      sys.exit(0)

  

  if args.server is None:

-     odcs_url = env_config[args.infra][args.env]['server_url']

+     odcs_url = env_config[args.infra][args.env]["server_url"]

  else:

      odcs_url = args.server

  

- if args.infra == 'fedora':

+ if args.infra == "fedora":

      if args.token:

          if os.path.exists(args.token):

              with open(args.token, "r") as token_file:
@@ -282,19 +322,19 @@ 

  

          # Get the auth token using the OpenID client.

          oidc = openidc_client.OpenIDCClient(

-             'odcs',

+             "odcs",

              id_provider,

-             {'Token': 'Token', 'Authorization': 'Authorization'},

-             'odcs-authorizer',

-             'notsecret',

+             {"Token": "Token", "Authorization": "Authorization"},

+             "odcs-authorizer",

+             "notsecret",

          )

  

          scopes = [

-             'openid',

-             'https://id.fedoraproject.org/scope/groups',

-             'https://pagure.io/odcs/new-compose',

-             'https://pagure.io/odcs/renew-compose',

-             'https://pagure.io/odcs/delete-compose',

+             "openid",

+             "https://id.fedoraproject.org/scope/groups",

+             "https://pagure.io/odcs/new-compose",

+             "https://pagure.io/odcs/renew-compose",

+             "https://pagure.io/odcs/delete-compose",

          ]

          try:

              token = oidc.get_token(scopes, new_token=True)
@@ -304,14 +344,11 @@ 

              raise

  

      client = odcs.client.odcs.ODCS(

-         odcs_url,

-         auth_mech=odcs.client.odcs.AuthMech.OpenIDC,

-         openidc_token=token,

+         odcs_url, auth_mech=odcs.client.odcs.AuthMech.OpenIDC, openidc_token=token,

      )

  else:

      client = odcs.client.odcs.ODCS(

-         odcs_url,

-         auth_mech=odcs.client.odcs.AuthMech.Kerberos,

+         odcs_url, auth_mech=odcs.client.odcs.AuthMech.Kerberos,

      )

  

  request_args = {}
@@ -329,7 +366,7 @@ 

      request_args["target_dir"] = args.target_dir

  

  try:

-     args.sigkey = [key.replace('none', '') for key in getattr(args, "sigkey", [])]

+     args.sigkey = [key.replace("none", "") for key in getattr(args, "sigkey", [])]

      if args.command == "create":

          print(create_command_deprecated, file=sys.stderr)

          result = client.new_compose(
@@ -347,23 +384,34 @@ 

          )

      elif args.command == "create-tag":

          source = odcs.client.odcs.ComposeSourceTag(

-             args.tag, args.packages, args.build, args.sigkey, args.koji_event,

-             args.modular_tag, args.module_defaults_url, args.module_defaults_commit)

+             args.tag,

+             args.packages,

+             args.build,

+             args.sigkey,

+             args.koji_event,

+             args.modular_tag,

+             args.module_defaults_url,

+             args.module_defaults_commit,

+         )

          result = client.request_compose(source, **request_args)

      elif args.command == "create-module":

          source = odcs.client.odcs.ComposeSourceModule(

-             args.modules, args.sigkey, args.module_defaults_url, args.module_defaults_commit)

+             args.modules,

+             args.sigkey,

+             args.module_defaults_url,

+             args.module_defaults_commit,

+         )

          result = client.request_compose(source, **request_args)

      elif args.command == "create-pulp":

          source = odcs.client.odcs.ComposeSourcePulp(args.content_sets)

          result = client.request_compose(source, **request_args)

      elif args.command == "create-raw-config":

          source = odcs.client.odcs.ComposeSourceRawConfig(

-             args.raw_config_name, args.raw_config_commit, args.koji_event)

+             args.raw_config_name, args.raw_config_commit, args.koji_event

+         )

          result = client.request_compose(source, **request_args)

      elif args.command == "create-build":

-         source = odcs.client.odcs.ComposeSourceBuild(

-             args.builds, args.sigkey)

+         source = odcs.client.odcs.ComposeSourceBuild(args.builds, args.sigkey)

          result = client.request_compose(source, **request_args)

      elif args.command == "wait":

          result = {"id": int(args.compose_id)}
@@ -384,8 +432,10 @@ 

      print(json.dumps(result, indent=4, sort_keys=True))

  else:

      if not args.quiet:

-         print("Waiting for command %s on compose %d to finish." %

-               (args.command, result["id"]))

+         print(

+             "Waiting for command %s on compose %d to finish."

+             % (args.command, result["id"])

+         )

      try:

          result = client.wait_for_compose(result["id"], 3600, watch_logs=args.watch)

      except (KeyboardInterrupt, SystemExit):

file modified
+1 -1
@@ -1,1 +1,1 @@ 

- __path__ = __import__('pkgutil').extend_path(__path__, __name__)

+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)

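The changes in this pull request are the output of running black over the tree. To reproduce or verify the formatting locally (the target path is illustrative):

    pip install black
    black .                 # rewrite non-conforming files in place
    black --check --diff .  # report-only: exit non-zero and print the diffs

``--check`` is the report-only mode a CI job wants, since it fails the run instead of silently rewriting files.
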
file modified
+166 -91
@@ -82,49 +82,61 @@ 

          if type_error:

              raise TypeError(type_error)

          else:

-             raise TypeError('Value {0} is not an integer.'.format(value))

+             raise TypeError("Value {0} is not an integer.".format(value))

      if value < min:

          if value_error:

              raise ValueError(value_error)

          else:

              raise ValueError(

-                 'Value {0} is less than minimal value {1}.'.format(

-                     value, min))

+                 "Value {0} is less than minimal value {1}.".format(value, min)

+             )

  

  

  def validate_page(value):

      validate_int(

          value,

-         type_error='page number must be an integer.',

-         value_error='page number must start from 1.')

+         type_error="page number must be an integer.",

+         value_error="page number must start from 1.",

+     )

  

  

  def validate_per_page(value):

      validate_int(

          value,

-         type_error='per_page must be an integer.',

-         value_error='per_page must be greater than or equal to 1.')

+         type_error="per_page must be an integer.",

+         value_error="per_page must be greater than or equal to 1.",

+     )

  

  

  class ComposeSourceGeneric(object):

-     def __init__(self, source, source_type, packages=None, builds=None, sigkeys=None,

-                  koji_event=None, modular_koji_tags=None, module_defaults_url=None,

-                  module_defaults_commit=None, **kwargs):

-         self.source = {'source': source, 'type': source_type}

+     def __init__(

+         self,

+         source,

+         source_type,

+         packages=None,

+         builds=None,

+         sigkeys=None,

+         koji_event=None,

+         modular_koji_tags=None,

+         module_defaults_url=None,

+         module_defaults_commit=None,

+         **kwargs

+     ):

+         self.source = {"source": source, "type": source_type}

          if packages:

-             self.source['packages'] = packages

+             self.source["packages"] = packages

          if builds:

-             self.source['builds'] = builds

+             self.source["builds"] = builds

          if sigkeys:

-             self.source['sigkeys'] = sigkeys

+             self.source["sigkeys"] = sigkeys

          if koji_event:

-             self.source['koji_event'] = koji_event

+             self.source["koji_event"] = koji_event

          if modular_koji_tags:

-             self.source['modular_koji_tags'] = modular_koji_tags

+             self.source["modular_koji_tags"] = modular_koji_tags

          if module_defaults_url:

-             self.source['module_defaults_url'] = module_defaults_url

+             self.source["module_defaults_url"] = module_defaults_url

          if module_defaults_commit:

-             self.source['module_defaults_commit'] = module_defaults_commit

+             self.source["module_defaults_commit"] = module_defaults_commit

          self.source.update(kwargs)

  

  
@@ -132,9 +144,19 @@ 

      """

      Compose source taking Koji tag as input.

      """

-     def __init__(self, tag, packages=None, builds=None, sigkeys=None,

-                  koji_event=None, modular_koji_tags=None, module_defaults_url=None,

-                  module_defaults_commit=None, **kwargs):

+ 

+     def __init__(

+         self,

+         tag,

+         packages=None,

+         builds=None,

+         sigkeys=None,

+         koji_event=None,

+         modular_koji_tags=None,

+         module_defaults_url=None,

+         module_defaults_commit=None,

+         **kwargs

+     ):

          """

          Creates new ComposeSourceTag instance.

  
@@ -156,16 +178,32 @@ 

              module defaults repository.

          """

          super(ComposeSourceTag, self).__init__(

-             tag, "tag", packages, builds, sigkeys, koji_event, modular_koji_tags,

-             module_defaults_url, module_defaults_commit, **kwargs)

+             tag,

+             "tag",

+             packages,

+             builds,

+             sigkeys,

+             koji_event,

+             modular_koji_tags,

+             module_defaults_url,

+             module_defaults_commit,

+             **kwargs

+         )

  

  

  class ComposeSourceModule(ComposeSourceGeneric):

      """

      Compose source taking list of modules as input.

      """

-     def __init__(self, modules, sigkeys=None, module_defaults_url=None,

-                  module_defaults_commit=None, **kwargs):

+ 

+     def __init__(

+         self,

+         modules,

+         sigkeys=None,

+         module_defaults_url=None,

+         module_defaults_commit=None,

+         **kwargs

+     ):

          """

          Creates new ComposeSourceModule instance.

  
@@ -177,15 +215,20 @@ 

              module defaults repository.

          """

          super(ComposeSourceModule, self).__init__(

-             " ".join(modules), "module", sigkeys=sigkeys,

+             " ".join(modules),

+             "module",

+             sigkeys=sigkeys,

              module_defaults_url=module_defaults_url,

-             module_defaults_commit=module_defaults_commit, **kwargs)

+             module_defaults_commit=module_defaults_commit,

+             **kwargs

+         )

  

  

  class ComposeSourcePulp(ComposeSourceGeneric):

      """

      Compose source taking list of Pulp content_sets as input.

      """

+ 

      def __init__(self, content_sets, **kwargs):

          """

          Creates new ComposeSourcePulp instance.
@@ -194,13 +237,15 @@ 

              content-sets will be included in the compose.

          """

          super(ComposeSourcePulp, self).__init__(

-             " ".join(content_sets), "pulp", **kwargs)

+             " ".join(content_sets), "pulp", **kwargs

+         )

  

  

  class ComposeSourceRawConfig(ComposeSourceGeneric):

      """

      Compose source taking raw Pungi configuration file as input.

      """

+ 

      def __init__(self, config_name, commit, koji_event=None, **kwargs):

          """

          Creates new ComposeSourceRawConfig instance.
@@ -211,14 +256,18 @@ 

              latest Koji event will be used.

          """

          super(ComposeSourceRawConfig, self).__init__(

-             "%s#%s" % (config_name, commit), "raw_config", koji_event=koji_event,

-             **kwargs)

+             "%s#%s" % (config_name, commit),

+             "raw_config",

+             koji_event=koji_event,

+             **kwargs

+         )

  

  

  class ComposeSourceBuild(ComposeSourceGeneric):

      """

      Compose source taking list of Koji builds as input.

      """

+ 

      def __init__(self, builds, sigkeys=None, **kwargs):

          """

          Creates new ComposeSourceBuild instance.
@@ -228,15 +277,23 @@ 

              signed. Empty string in the list allows unsigned packages.

          """

          super(ComposeSourceBuild, self).__init__(

-             "", "build", builds=builds, sigkeys=sigkeys, **kwargs)

+             "", "build", builds=builds, sigkeys=sigkeys, **kwargs

+         )

  

  

  class ODCS(object):

      """Client API to interact with ODCS APIs"""

  

-     def __init__(self, server_url, api_version='1', verify_ssl=True,

-                  auth_mech=None, openidc_token=None, ssl_cert=None,

-                  ssl_key=None):

+     def __init__(

+         self,

+         server_url,

+         api_version="1",

+         verify_ssl=True,

+         auth_mech=None,

+         openidc_token=None,

+         ssl_cert=None,

+         ssl_key=None,

+     ):

          """Initialize ODCS client

  

          :param str server_url: server URL of ODCS.
@@ -263,13 +320,17 @@ 

          self._api_version = api_version

          self._verify_ssl = verify_ssl

          if auth_mech == AuthMech.OpenIDC and not openidc_token:

-             raise ValueError('OpenIDC token must be specified when OpenIDC'

-                              ' authentication is enabled.')

+             raise ValueError(

+                 "OpenIDC token must be specified when OpenIDC"

+                 " authentication is enabled."

+             )

          self._openidc_token = openidc_token

  

          if auth_mech == AuthMech.SSL and (not ssl_cert or not ssl_key):

-             raise ValueError('SSL cert and keymust be specified when SSL'

-                              ' authentication is enabled.')

+             raise ValueError(

+                 "SSL cert and key must be specified when SSL"

+                 " authentication is enabled."

+             )

          self._ssl_cert = ssl_cert

          self._ssl_key = ssl_key

  
@@ -278,7 +339,8 @@ 

          else:

              if not AuthMech.has(auth_mech):

                  raise ValueError(

-                     'Unknown authentication mechanism {0}'.format(auth_mech))

+                     "Unknown authentication mechanism {0}".format(auth_mech)

+                 )

              self._auth_mech = auth_mech

  

      @property
@@ -306,7 +368,8 @@ 

          """

          return urllib_parse.urljoin(

              self._server_url,

-             'api/{0}/{1}'.format(self.api_version, resource_path.lstrip('/')))

+             "api/{0}/{1}".format(self.api_version, resource_path.lstrip("/")),

+         )

  

      def _make_request(self, method, resource_path, data=None):

          """Make a HTTP request to server
@@ -325,24 +388,24 @@ 

          request_data = {}

          headers = {}

          if data:

-             if method in ('post', 'patch'):

-                 request_data['data'] = json.dumps(data)

-                 headers['Content-Type'] = 'application/json'

-             if method == 'get':

-                 request_data['params'] = data

+             if method in ("post", "patch"):

+                 request_data["data"] = json.dumps(data)

+                 headers["Content-Type"] = "application/json"

+             if method == "get":

+                 request_data["params"] = data

          if not self._verify_ssl:

-             request_data['verify'] = False

+             request_data["verify"] = False

          if self.auth_mech == AuthMech.OpenIDC:

-             headers['Authorization'] = 'Bearer {0}'.format(self._openidc_token)

+             headers["Authorization"] = "Bearer {0}".format(self._openidc_token)

          elif self.auth_mech == AuthMech.Kerberos:

-             request_data['auth'] = HTTPKerberosAuth()

+             request_data["auth"] = HTTPKerberosAuth()

          elif self.auth_mech == AuthMech.SSL:

-             request_data['cert'] = (self._ssl_cert, self._ssl_key)

+             request_data["cert"] = (self._ssl_cert, self._ssl_key)

  

          # Anonymous is the last possible value and no auth should be set

  

          if headers:

-             request_data['headers'] = headers

+             request_data["headers"] = headers

  

          request_method = getattr(requests, method)

          resource_url = self._make_endpoint(resource_path)
@@ -357,26 +420,39 @@ 

  

      def _get(self, resource_path, data=None):

          """Make a GET HTTP request to server"""

-         return self._make_request('get', resource_path, data)

+         return self._make_request("get", resource_path, data)

  

      def _post(self, resource_path, data=None):

          """Make a POST HTTP request to server"""

-         return self._make_request('post', resource_path, data)

+         return self._make_request("post", resource_path, data)

  

      def _delete(self, resource_path, data=None):

          """Make a DELETE HTTP request to server"""

-         return self._make_request('delete', resource_path, data)

+         return self._make_request("delete", resource_path, data)

  

      def _patch(self, resource_path, data=None):

          """Make a PATCH HTTP request to server"""

-         return self._make_request('patch', resource_path, data)

- 

-     def new_compose(self, source, source_type,

-                     seconds_to_live=None, packages=[], flags=[],

-                     sigkeys=None, koji_event=None, results=None,

-                     arches=None, builds=None, modular_koji_tags=None,

-                     module_defaults_url=None, module_defaults_commit=None,

-                     lookaside_repos=None, label=None, compose_type=None):

+         return self._make_request("patch", resource_path, data)

+ 

+     def new_compose(

+         self,

+         source,

+         source_type,

+         seconds_to_live=None,

+         packages=[],

+         flags=[],

+         sigkeys=None,

+         koji_event=None,

+         results=None,

+         arches=None,

+         builds=None,

+         modular_koji_tags=None,

+         module_defaults_url=None,

+         module_defaults_commit=None,

+         lookaside_repos=None,

+         label=None,

+         compose_type=None,

+     ):

          """Request a new compose

  

          .. warning::
@@ -409,39 +485,37 @@ 

          :return: the newly created Compose

          :rtype: dict

          """

-         request_data = {

-             'source': {'source': source, 'type': source_type}

-         }

+         request_data = {"source": {"source": source, "type": source_type}}

          if packages:

-             request_data['source']['packages'] = packages

+             request_data["source"]["packages"] = packages

          if builds:

-             request_data['source']['builds'] = builds

+             request_data["source"]["builds"] = builds

          if sigkeys:

-             request_data['source']['sigkeys'] = sigkeys

+             request_data["source"]["sigkeys"] = sigkeys

          if koji_event:

-             request_data['source']['koji_event'] = koji_event

+             request_data["source"]["koji_event"] = koji_event

          if modular_koji_tags:

-             request_data['source']['modular_koji_tags'] = modular_koji_tags

+             request_data["source"]["modular_koji_tags"] = modular_koji_tags

          if module_defaults_url:

-             request_data['source']['module_defaults_url'] = module_defaults_url

+             request_data["source"]["module_defaults_url"] = module_defaults_url

          if module_defaults_commit:

-             request_data['source']['module_defaults_commit'] = module_defaults_commit

+             request_data["source"]["module_defaults_commit"] = module_defaults_commit

          if lookaside_repos:

-             request_data['lookaside_repos'] = lookaside_repos

+             request_data["lookaside_repos"] = lookaside_repos

          if label:

-             request_data['label'] = label

+             request_data["label"] = label

          if compose_type:

-             request_data['compose_type'] = compose_type

+             request_data["compose_type"] = compose_type

          if seconds_to_live is not None:

-             request_data['seconds-to-live'] = seconds_to_live

+             request_data["seconds-to-live"] = seconds_to_live

          if flags:

-             request_data['flags'] = flags

+             request_data["flags"] = flags

          if results:

-             request_data['results'] = results

+             request_data["results"] = results

          if arches:

-             request_data['arches'] = arches

+             request_data["arches"] = arches

  

-         r = self._post('composes/', request_data)

+         r = self._post("composes/", request_data)

          return r.json()

  

      def request_compose(self, source, **kwargs):
@@ -477,11 +551,11 @@ 

          :rtype: dict

          """

          if seconds_to_live is not None:

-             request_data = {'seconds-to-live': seconds_to_live}

+             request_data = {"seconds-to-live": seconds_to_live}

          else:

              request_data = None

  

-         r = self._patch('composes/{0}'.format(compose_id), request_data)

+         r = self._patch("composes/{0}".format(compose_id), request_data)

          return r.json()

  

      def find_composes(self, **search_criteria):
@@ -493,12 +567,12 @@ 

          :return: list of found composes, each of them is a dict.

          :rtype: list

          """

-         if 'page' in search_criteria:

-             validate_page(search_criteria['page'])

-         if 'per_page' in search_criteria:

-             validate_per_page(search_criteria['per_page'])

+         if "page" in search_criteria:

+             validate_page(search_criteria["page"])

+         if "per_page" in search_criteria:

+             validate_per_page(search_criteria["per_page"])

  

-         r = self._get('composes/', search_criteria)

+         r = self._get("composes/", search_criteria)

          return r.json()

  

      def delete_compose(self, compose_id):
@@ -508,7 +582,7 @@ 

          :return: a mapping representing the acknowledgement that the compose was deleted.

          :rtype: dict

          """

-         r = self._delete('composes/{0}'.format(compose_id))

+         r = self._delete("composes/{0}".format(compose_id))

          return r.json()

  

      def get_compose(self, compose_id):
@@ -518,7 +592,7 @@ 

          :return: a mapping representing a compose.

          :rtype: dict

          """

-         r = self._get('composes/{0}'.format(compose_id))

+         r = self._get("composes/{0}".format(compose_id))

          return r.json()

  

      def wait_for_compose(self, compose_id, timeout=300, watch_logs=False):
@@ -555,14 +629,15 @@ 

                  data = log.read()

                  if data:

                      print(data)

-             if compose['state_name'] not in ['wait', 'generating']:

+             if compose["state_name"] not in ["wait", "generating"]:

                  return compose

  

              elapsed = time.time() - start_time

              if elapsed >= timeout:

                  raise RuntimeError(

-                     "Retrieving ODCS compose %s timed out after %s seconds" %

-                     (compose_id, timeout))

+                     "Retrieving ODCS compose %s timed out after %s seconds"

+                     % (compose_id, timeout)

+                 )

  

              time.sleep(sleep_time)

  

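Since the changes above are purely formatting, the client's public API is unchanged. As a reference, here is a minimal usage sketch assembled from the calls visible in this diff; the server URL, tag, and package names are placeholders:

    from odcs.client.odcs import ODCS, ComposeSourceTag

    # Anonymous client; pass auth_mech=AuthMech.OpenIDC with an openidc_token,
    # or an ssl_cert/ssl_key pair, when the server requires authentication.
    client = ODCS("https://odcs.example.com")

    # Request a compose built from a Koji tag, then block until it leaves
    # the "wait"/"generating" states or the timeout expires.
    source = ComposeSourceTag("f32", packages=["httpd"])
    compose = client.request_compose(source)
    compose = client.wait_for_compose(compose["id"], timeout=3600)
    print(compose["state_name"])
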
file modified
+259 -259
@@ -30,8 +30,14 @@ 

  

  from odcs.client.odcs import AuthMech

  from odcs.client.odcs import (

-     ODCS, ComposeSourceTag, ComposeSourceModule, ComposeSourcePulp,

-     ComposeSourceRawConfig, ComposeSourceBuild, ComposeLog)

+     ODCS,

+     ComposeSourceTag,

+     ComposeSourceModule,

+     ComposeSourcePulp,

+     ComposeSourceRawConfig,

+     ComposeSourceBuild,

+     ComposeLog,

+ )

  from odcs.client.odcs import validate_int

  

  
@@ -39,9 +45,9 @@ 

      """Test validate_int"""

  

      def test_failure_validate_int(self):

-         self.assertRaises(TypeError, validate_int, 'a')

-         self.assertRaises(TypeError, validate_int, '1')

-         self.assertRaises(TypeError, validate_int, '')

+         self.assertRaises(TypeError, validate_int, "a")

+         self.assertRaises(TypeError, validate_int, "1")

+         self.assertRaises(TypeError, validate_int, "")

          self.assertRaises(TypeError, validate_int, None)

          self.assertRaises(ValueError, validate_int, 0)

          self.assertRaises(ValueError, validate_int, -1)
@@ -55,127 +61,132 @@ 

      """Test ODCS._make_endpoint"""

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

+         self.server_url = "http://localhost/"

  

      def test_make_endpoint(self):

          odcs = ODCS(self.server_url)

  

-         endpoint_url = odcs._make_endpoint('composes/')

+         endpoint_url = odcs._make_endpoint("composes/")

          self.assertEqual(

-             '{0}api/{1}/composes/'.format(self.server_url,

-                                           odcs.api_version),

-             endpoint_url)

+             "{0}api/{1}/composes/".format(self.server_url, odcs.api_version),

+             endpoint_url,

+         )

  

-         endpoint_url = odcs._make_endpoint('/composes/')

+         endpoint_url = odcs._make_endpoint("/composes/")

          self.assertEqual(

-             '{0}api/{1}/composes/'.format(self.server_url,

-                                           odcs.api_version),

-             endpoint_url)

+             "{0}api/{1}/composes/".format(self.server_url, odcs.api_version),

+             endpoint_url,

+         )

  

  

  class TestMakeRequest(unittest.TestCase):

      """Test ODCS._make_request"""

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

-         self.resource_path = 'composes/'

+         self.server_url = "http://localhost/"

+         self.resource_path = "composes/"

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_raise_error(self, requests):

          requests.get.return_value.status_code = 401

  

          self.odcs = ODCS(self.server_url)

-         r = self.odcs._make_request('get', self.resource_path)

+         r = self.odcs._make_request("get", self.resource_path)

  

          self.assertEqual(requests.get.return_value, r)

          requests.get.return_value.raise_for_status.assert_called_once()

  

-     @patch('odcs.client.odcs.requests')

-     @patch('odcs.client.odcs.HTTPKerberosAuth')

+     @patch("odcs.client.odcs.requests")

+     @patch("odcs.client.odcs.HTTPKerberosAuth")

      def test_with_kerberos_auth(self, HTTPKerberosAuth, requests):

          requests.get.return_value.status_code = 200

          expected_auth = HTTPKerberosAuth.return_value

  

          odcs = ODCS(self.server_url, auth_mech=AuthMech.Kerberos)

-         r = odcs._make_request('get', self.resource_path)

+         r = odcs._make_request("get", self.resource_path)

  

          self.assertEqual(requests.get.return_value, r)

          requests.get.assert_called_once_with(

-             odcs._make_endpoint(self.resource_path),

-             auth=expected_auth)

+             odcs._make_endpoint(self.resource_path), auth=expected_auth

+         )

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_with_openidc_auth(self, requests):

-         fake_openidc_token = '1234567890'

+         fake_openidc_token = "1234567890"

          requests.post.return_value.status_code = 200

  

-         odcs = ODCS(self.server_url,

-                     auth_mech=AuthMech.OpenIDC,

-                     openidc_token=fake_openidc_token)

-         r = odcs._make_request('post', self.resource_path, data={'id': 1})

+         odcs = ODCS(

+             self.server_url,

+             auth_mech=AuthMech.OpenIDC,

+             openidc_token=fake_openidc_token,

+         )

+         r = odcs._make_request("post", self.resource_path, data={"id": 1})

  

          self.assertEqual(requests.post.return_value, r)

          requests.post.assert_called_once_with(

              odcs._make_endpoint(self.resource_path),

-             data=json.dumps({'id': 1}),

+             data=json.dumps({"id": 1}),

              headers={

-                 'Authorization': 'Bearer {0}'.format(fake_openidc_token),

-                 'Content-Type': 'application/json'

-             })

+                 "Authorization": "Bearer {0}".format(fake_openidc_token),

+                 "Content-Type": "application/json",

+             },

+         )

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_with_ssl_auth(self, requests):

          requests.post.return_value.status_code = 200

  

-         odcs = ODCS(self.server_url,

-                     auth_mech=AuthMech.SSL,

-                     ssl_cert="./ssl.crt", ssl_key="./ssl.key")

-         r = odcs._make_request('post', self.resource_path, data={'id': 1})

+         odcs = ODCS(

+             self.server_url,

+             auth_mech=AuthMech.SSL,

+             ssl_cert="./ssl.crt",

+             ssl_key="./ssl.key",

+         )

+         r = odcs._make_request("post", self.resource_path, data={"id": 1})

  

          self.assertEqual(requests.post.return_value, r)

          requests.post.assert_called_once_with(

              odcs._make_endpoint(self.resource_path),

-             data=json.dumps({'id': 1}),

+             data=json.dumps({"id": 1}),

              cert=("./ssl.crt", "./ssl.key"),

-             headers={

-                 'Content-Type': 'application/json'

-             })

+             headers={"Content-Type": "application/json"},

+         )

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_do_not_verify_ssl(self, requests):

          requests.post.return_value.status_code = 200

  

          odcs = ODCS(self.server_url, verify_ssl=False)

-         r = odcs._make_request('post', self.resource_path)

+         r = odcs._make_request("post", self.resource_path)

  

          self.assertEqual(requests.post.return_value, r)

          requests.post.assert_called_once_with(

-             odcs._make_endpoint(self.resource_path),

-             verify=False)

+             odcs._make_endpoint(self.resource_path), verify=False

+         )

  

  

  class TestGetCompose(unittest.TestCase):

      """Test ODCS.get_compose"""

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

+         self.server_url = "http://localhost/"

          self.odcs = ODCS(self.server_url)

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_get_compose(self, requests):

          fake_compose = {

-             'flags': [],

-             'id': 1,

-             'owner': 'Unknown',

-             'result_repo': 'http://odcs.host.qe.eng.pek2.redhat.com/composes/latest-odcs-1-1/compose/Temporary',

-             'source': 'cf-1.0-rhel-5',

-             'source_type': 1,

-             'state': 4,

-             'state_name': 'failed',

-             'time_done': '2017-07-11T13:05:40Z',

-             'time_removed': None,

-             'time_submitted': '2017-07-11T13:05:40Z',

-             'time_to_expire': '2017-07-12T13:05:40Z'

+             "flags": [],

+             "id": 1,

+             "owner": "Unknown",

+             "result_repo": "http://odcs.host.qe.eng.pek2.redhat.com/composes/latest-odcs-1-1/compose/Temporary",

+             "source": "cf-1.0-rhel-5",

+             "source_type": 1,

+             "state": 4,

+             "state_name": "failed",

+             "time_done": "2017-07-11T13:05:40Z",

+             "time_removed": None,

+             "time_submitted": "2017-07-11T13:05:40Z",

+             "time_to_expire": "2017-07-12T13:05:40Z",

          }

          requests.get = Mock()

          requests.get.return_value.status_code = 200
@@ -185,23 +196,23 @@ 

  

          self.assertEqual(fake_compose, compose)

          requests.get.assert_called_once_with(

-             '{0}api/{1}/composes/1'.format(

-                 self.server_url, self.odcs.api_version))

+             "{0}api/{1}/composes/1".format(self.server_url, self.odcs.api_version)

+         )

  

  

  class TestDeleteCompose(unittest.TestCase):

      """Test ODCS.delete_compose"""

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

+         self.server_url = "http://localhost/"

          self.odcs = ODCS(self.server_url)

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_compose_id_not_found(self, requests):

          fake_response = {

-             'status': 404,

-             'error': 'Not Found',

-             'message': 'No such compose found.',

+             "status": 404,

+             "error": "Not Found",

+             "message": "No such compose found.",

          }

          requests.delete.return_value.status_code = 404

          requests.delete.return_value.json.return_value = fake_response
@@ -210,11 +221,11 @@ 

  

          requests.delete.return_value.raise_for_status.assert_called_once()

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_delete_compose(self, requests):

          fake_response = {

-             'status': 202,

-             'message': 'The delete request for compose (id=1) has been accepted'

+             "status": 202,

+             "message": "The delete request for compose (id=1) has been accepted",

          }

          requests.delete.return_value.status_code = 202

          requests.delete.return_value.json.return_value = fake_response
@@ -222,117 +233,120 @@ 

          r = self.odcs.delete_compose(1)

  

          self.assertEqual(fake_response, r)

-         requests.delete.assert_called_once_with(

-             self.odcs._make_endpoint('composes/1'))

+         requests.delete.assert_called_once_with(self.odcs._make_endpoint("composes/1"))

  

  

- @patch('odcs.client.odcs.requests')

+ @patch("odcs.client.odcs.requests")

  class TestNewCompose(unittest.TestCase):

      """Test ODCS.new_compose"""

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

+         self.server_url = "http://localhost/"

          self.odcs = ODCS(self.server_url)

  

      def test_create_a_new_compose(self, requests):

          fake_new_compose = {

-             'flags': [],

-             'id': 7,

-             'owner': 'Unknown',

-             'result_repo': 'http://odcs.host.qe.eng.pek2.redhat.com/composes/latest-odcs-7-1/compose/Temporary',

-             'source': 'cf-1.0-rhel-5',

-             'source_type': 1,

-             'koji_event': 123456,

-             'state': 0,

-             'state_name': 'wait',

-             'time_done': None,

-             'time_removed': None,

-             'time_submitted': '2017-07-21T03:33:43Z',

-             'time_to_expire': '2017-07-22T03:33:43Z'

+             "flags": [],

+             "id": 7,

+             "owner": "Unknown",

+             "result_repo": "http://odcs.host.qe.eng.pek2.redhat.com/composes/latest-odcs-7-1/compose/Temporary",

+             "source": "cf-1.0-rhel-5",

+             "source_type": 1,

+             "koji_event": 123456,

+             "state": 0,

+             "state_name": "wait",

+             "time_done": None,

+             "time_removed": None,

+             "time_submitted": "2017-07-21T03:33:43Z",

+             "time_to_expire": "2017-07-22T03:33:43Z",

          }

          requests.post.return_value.status_code = 200

          requests.post.return_value.json.return_value = fake_new_compose

  

-         new_compose = self.odcs.new_compose('cf-1.0-rhel-5',

-                                             'tag',

-                                             packages=['libdnet'],

-                                             sigkeys=['123', '456'],

-                                             koji_event=123456,

-                                             results=["boot.iso"])

+         new_compose = self.odcs.new_compose(

+             "cf-1.0-rhel-5",

+             "tag",

+             packages=["libdnet"],

+             sigkeys=["123", "456"],

+             koji_event=123456,

+             results=["boot.iso"],

+         )

  

          self.assertEqual(fake_new_compose, new_compose)

          requests.post.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             data=json.dumps({

-                 'source': {'source': 'cf-1.0-rhel-5',

-                            'type': 'tag',

-                            'packages': ['libdnet'],

-                            'sigkeys': ['123', '456'],

-                            'koji_event': 123456},

-                 'results': ['boot.iso'],

-             }),

-             headers={'Content-Type': 'application/json'}

+             self.odcs._make_endpoint("composes/"),

+             data=json.dumps(

+                 {

+                     "source": {

+                         "source": "cf-1.0-rhel-5",

+                         "type": "tag",

+                         "packages": ["libdnet"],

+                         "sigkeys": ["123", "456"],

+                         "koji_event": 123456,

+                     },

+                     "results": ["boot.iso"],

+                 }

+             ),

+             headers={"Content-Type": "application/json"},

          )

  

      def test_request_compose_source_tag(self, requests):

          source = ComposeSourceTag("f32")

          self.odcs.request_compose(source)

          requests.post.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             data=json.dumps({

-                 'source': {'source': 'f32',

-                            'type': 'tag'},

-             }),

-             headers={'Content-Type': 'application/json'}

+             self.odcs._make_endpoint("composes/"),

+             data=json.dumps({"source": {"source": "f32", "type": "tag"}}),

+             headers={"Content-Type": "application/json"},

          )

  

      def test_request_compose_source_module(self, requests):

          source = ComposeSourceModule(["testmodule:master", "foo:bar"])

          self.odcs.request_compose(source)

          requests.post.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             data=json.dumps({

-                 'source': {'source': 'testmodule:master foo:bar',

-                            'type': 'module'},

-             }),

-             headers={'Content-Type': 'application/json'}

+             self.odcs._make_endpoint("composes/"),

+             data=json.dumps(

+                 {"source": {"source": "testmodule:master foo:bar", "type": "module"}}

+             ),

+             headers={"Content-Type": "application/json"},

          )

  

      def test_request_compose_source_pulp(self, requests):

          source = ComposeSourcePulp(["content-set1", "content-set2"])

          self.odcs.request_compose(source)

          requests.post.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             data=json.dumps({

-                 'source': {'source': 'content-set1 content-set2',

-                            'type': 'pulp'},

-             }),

-             headers={'Content-Type': 'application/json'}

+             self.odcs._make_endpoint("composes/"),

+             data=json.dumps(

+                 {"source": {"source": "content-set1 content-set2", "type": "pulp"}}

+             ),

+             headers={"Content-Type": "application/json"},

          )

  

      def test_request_compose_source_raw_config(self, requests):

          source = ComposeSourceRawConfig("name", "commit")

          self.odcs.request_compose(source)

          requests.post.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             data=json.dumps({

-                 'source': {'source': 'name#commit',

-                            'type': 'raw_config'},

-             }),

-             headers={'Content-Type': 'application/json'}

+             self.odcs._make_endpoint("composes/"),

+             data=json.dumps(

+                 {"source": {"source": "name#commit", "type": "raw_config"}}

+             ),

+             headers={"Content-Type": "application/json"},

          )

  

      def test_request_compose_source_build(self, requests):

          source = ComposeSourceBuild(["foo-1-1", "bar-1-1"])

          self.odcs.request_compose(source)

          requests.post.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             data=json.dumps({

-                 'source': {'source': '',

-                            'type': 'build',

-                            'builds': ['foo-1-1', 'bar-1-1']},

-             }),

-             headers={'Content-Type': 'application/json'}

+             self.odcs._make_endpoint("composes/"),

+             data=json.dumps(

+                 {

+                     "source": {

+                         "source": "",

+                         "type": "build",

+                         "builds": ["foo-1-1", "bar-1-1"],

+                     },

+                 }

+             ),

+             headers={"Content-Type": "application/json"},

          )

  

  
@@ -340,24 +354,24 @@ 

      """Test ODCS.renew_compose"""

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

+         self.server_url = "http://localhost/"

          self.odcs = ODCS(self.server_url)

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_renew_a_compose(self, requests):

          fake_renew_compose = {

-             'flags': [],

-             'id': 7,

-             'owner': 'Unknown',

-             'result_repo': 'http://odcs.host.qe.eng.pek2.redhat.com/composes/latest-odcs-7-1/compose/Temporary',

-             'source': 'cf-1.0-rhel-5',

-             'source_type': 1,

-             'state': 0,

-             'state_name': 'wait',

-             'time_done': None,

-             'time_removed': None,

-             'time_submitted': '2017-07-21T03:33:43Z',

-             'time_to_expire': '2017-07-22T03:33:43Z'

+             "flags": [],

+             "id": 7,

+             "owner": "Unknown",

+             "result_repo": "http://odcs.host.qe.eng.pek2.redhat.com/composes/latest-odcs-7-1/compose/Temporary",

+             "source": "cf-1.0-rhel-5",

+             "source_type": 1,

+             "state": 0,

+             "state_name": "wait",

+             "time_done": None,

+             "time_removed": None,

+             "time_submitted": "2017-07-21T03:33:43Z",

+             "time_to_expire": "2017-07-22T03:33:43Z",

          }

          requests.patch.return_value.status_code = 200

          requests.patch.return_value.json.return_value = fake_renew_compose
@@ -366,213 +380,199 @@ 

  

          self.assertEqual(fake_renew_compose, r)

          requests.patch.assert_called_once_with(

-             self.odcs._make_endpoint('composes/6'),

-             data=json.dumps({'seconds-to-live': 60}),

-             headers={'Content-Type': 'application/json'})

+             self.odcs._make_endpoint("composes/6"),

+             data=json.dumps({"seconds-to-live": 60}),

+             headers={"Content-Type": "application/json"},

+         )

  

  

  class TestFindComposes(unittest.TestCase):

      """Test ODCS.find_composes"""

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

+         self.server_url = "http://localhost/"

          self.odcs = ODCS(self.server_url)

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_find_composes_without_pagination(self, requests):

          fake_found_composes = {

-             'items': [

+             "items": [

                  {

-                     'flags': [],

-                     'id': 1,

-                     'owner': 'Unknown',

-                     'result_repo': 'http://localhost/composes/latest-odcs-1-1/compose/Temporary',

-                     'source': 'cf-1.0-rhel-5',

-                     'source_type': 1,

-                     'state': 4,

-                     'state_name': 'failed',

-                     'time_done': '2017-07-11T13:05:40Z',

-                     'time_removed': None,

-                     'time_submitted': '2017-07-11T13:05:40Z',

-                     'time_to_expire': '2017-07-12T13:05:40Z'

+                     "flags": [],

+                     "id": 1,

+                     "owner": "Unknown",

+                     "result_repo": "http://localhost/composes/latest-odcs-1-1/compose/Temporary",

+                     "source": "cf-1.0-rhel-5",

+                     "source_type": 1,

+                     "state": 4,

+                     "state_name": "failed",

+                     "time_done": "2017-07-11T13:05:40Z",

+                     "time_removed": None,

+                     "time_submitted": "2017-07-11T13:05:40Z",

+                     "time_to_expire": "2017-07-12T13:05:40Z",

                  },

                  {

-                     'flags': [],

-                     'id': 2,

-                     'owner': 'Unknown',

-                     'result_repo': 'http://localhost/composes/latest-odcs-2-1/compose/Temporary',

-                     'source': 'cf-1.0-rhel-5',

-                     'source_type': 1,

-                     'state': 4,

-                     'state_name': 'failed',

-                     'time_done': '2017-07-11T13:07:42Z',

-                     'time_removed': None,

-                     'time_submitted': '2017-07-11T13:07:41Z',

-                     'time_to_expire': '2017-07-12T13:07:41Z'

+                     "flags": [],

+                     "id": 2,

+                     "owner": "Unknown",

+                     "result_repo": "http://localhost/composes/latest-odcs-2-1/compose/Temporary",

+                     "source": "cf-1.0-rhel-5",

+                     "source_type": 1,

+                     "state": 4,

+                     "state_name": "failed",

+                     "time_done": "2017-07-11T13:07:42Z",

+                     "time_removed": None,

+                     "time_submitted": "2017-07-11T13:07:41Z",

+                     "time_to_expire": "2017-07-12T13:07:41Z",

                  },

              ],

-             'meta': {

-                 'page': 1,

-                 'pages': 1,

-             }

+             "meta": {"page": 1, "pages": 1},

          }

          requests.get.return_value.status_code = 200

          requests.get.return_value.json.return_value = fake_found_composes

  

-         r = self.odcs.find_composes(owner='unknown',

-                                     source_type='tag')

+         r = self.odcs.find_composes(owner="unknown", source_type="tag")

  

          self.assertEqual(fake_found_composes, r)

          requests.get.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             params={'owner': 'unknown', 'source_type': 'tag'})

+             self.odcs._make_endpoint("composes/"),

+             params={"owner": "unknown", "source_type": "tag"},

+         )

  

-     @patch('odcs.client.odcs.requests')

+     @patch("odcs.client.odcs.requests")

      def test_find_composes_the_second_page(self, requests):

          fake_found_composes = {

-             'items': [

+             "items": [

                  {

-                     'flags': [],

-                     'id': 1,

-                     'owner': 'Unknown',

-                     'result_repo': 'http://localhost/composes/latest-odcs-1-1/compose/Temporary',

-                     'source': 'cf-1.0-rhel-5',

-                     'source_type': 1,

-                     'state': 4,

-                     'state_name': 'failed',

-                     'time_done': '2017-07-11T13:05:40Z',

-                     'time_removed': None,

-                     'time_submitted': '2017-07-11T13:05:40Z',

-                     'time_to_expire': '2017-07-12T13:05:40Z'

+                     "flags": [],

+                     "id": 1,

+                     "owner": "Unknown",

+                     "result_repo": "http://localhost/composes/latest-odcs-1-1/compose/Temporary",

+                     "source": "cf-1.0-rhel-5",

+                     "source_type": 1,

+                     "state": 4,

+                     "state_name": "failed",

+                     "time_done": "2017-07-11T13:05:40Z",

+                     "time_removed": None,

+                     "time_submitted": "2017-07-11T13:05:40Z",

+                     "time_to_expire": "2017-07-12T13:05:40Z",

                  },

                  {

-                     'flags': [],

-                     'id': 2,

-                     'owner': 'Unknown',

-                     'result_repo': 'http://localhost/composes/latest-odcs-2-1/compose/Temporary',

-                     'source': 'cf-1.0-rhel-5',

-                     'source_type': 1,

-                     'state': 4,

-                     'state_name': 'failed',

-                     'time_done': '2017-07-11T13:07:42Z',

-                     'time_removed': None,

-                     'time_submitted': '2017-07-11T13:07:41Z',

-                     'time_to_expire': '2017-07-12T13:07:41Z'

+                     "flags": [],

+                     "id": 2,

+                     "owner": "Unknown",

+                     "result_repo": "http://localhost/composes/latest-odcs-2-1/compose/Temporary",

+                     "source": "cf-1.0-rhel-5",

+                     "source_type": 1,

+                     "state": 4,

+                     "state_name": "failed",

+                     "time_done": "2017-07-11T13:07:42Z",

+                     "time_removed": None,

+                     "time_submitted": "2017-07-11T13:07:41Z",

+                     "time_to_expire": "2017-07-12T13:07:41Z",

                  },

              ],

-             'meta': {

-                 'page': 1,

-                 'pages': 1,

-             }

+             "meta": {"page": 1, "pages": 1},

          }

          requests.get.return_value.status_code = 200

          requests.get.return_value.json.return_value = fake_found_composes

  

-         r = self.odcs.find_composes(owner='unknown',

-                                     source_type='tag',

-                                     page=2)

+         r = self.odcs.find_composes(owner="unknown", source_type="tag", page=2)

  

          self.assertEqual(fake_found_composes, r)

          requests.get.assert_called_once_with(

-             self.odcs._make_endpoint('composes/'),

-             params={'owner': 'unknown', 'source_type': 'tag', 'page': 2})

+             self.odcs._make_endpoint("composes/"),

+             params={"owner": "unknown", "source_type": "tag", "page": 2},

+         )

  

  

- @patch('time.sleep')

- @patch('odcs.client.odcs.ODCS.get_compose')

+ @patch("time.sleep")

+ @patch("odcs.client.odcs.ODCS.get_compose")

  class TestWaitForCompose(unittest.TestCase):

      """Test ODCS.wait_for_compose"""

  

      _TIME_TMP_VAR = 0

  

      def setUp(self):

-         self.server_url = 'http://localhost/'

+         self.server_url = "http://localhost/"

          self.odcs = ODCS(self.server_url)

  

      def test_wait_for_compose(self, get_compose, sleep):

          for state in ["done", "removed", "failed"]:

              get_compose.reset_mock()

              sleep.reset_mock()

-             get_compose.side_effect = [{"state_name": "wait"},

-                                        {"state_name": "generating"},

-                                        {"state_name": state}]

+             get_compose.side_effect = [

+                 {"state_name": "wait"},

+                 {"state_name": "generating"},

+                 {"state_name": state},

+             ]

              self.odcs.wait_for_compose(1)

  

-             self.assertEqual(sleep.mock_calls,

-                              [mock.call(1), mock.call(2)])

-             self.assertEqual(get_compose.mock_calls,

-                              [mock.call(1)] * 3)

+             self.assertEqual(sleep.mock_calls, [mock.call(1), mock.call(2)])

+             self.assertEqual(get_compose.mock_calls, [mock.call(1)] * 3)

  

-     @patch('time.time')

+     @patch("time.time")

      def test_wait_for_compose_timeout(self, time_travel, get_compose, sleep):

          get_compose.side_effect = [{"state_name": "wait"}] * 2

          time_travel.side_effect = [1, 301]

          self.assertRaises(RuntimeError, self.odcs.wait_for_compose, 1)

  

-     @patch('time.time')

-     def test_wait_for_compose_elapsed_close_to_timeout(

-             self, _time, get_compose, sleep):

+     @patch("time.time")

+     def test_wait_for_compose_elapsed_close_to_timeout(self, _time, get_compose, sleep):

          TestWaitForCompose._TIME_TMP_VAR = 0

  

          # Replace time.sleep() with a method that does not sleep, but instead

          # updates _TIME_TMP_VAR with the number of seconds it would have slept.

          def mocked_sleep(seconds):

              TestWaitForCompose._TIME_TMP_VAR += seconds

+ 

          sleep.side_effect = mocked_sleep

  

          # Replace time.time() with a method that returns _TIME_TMP_VAR.

          def mocked_time():

              return TestWaitForCompose._TIME_TMP_VAR

+ 

          _time.side_effect = mocked_time

  

          get_compose.side_effect = [{"state_name": "wait"}] * 10

          self.assertRaises(RuntimeError, self.odcs.wait_for_compose, 1, 10)

  

-         self.assertEqual(sleep.mock_calls,

-                          [mock.call(1), mock.call(2), mock.call(3), mock.call(4)])

+         self.assertEqual(

+             sleep.mock_calls, [mock.call(1), mock.call(2), mock.call(3), mock.call(4)]

+         )
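These assertions pin down the polling behaviour: the delay grows by one second per attempt, and the final sleep is clipped so the elapsed time never overshoots the timeout. A minimal sketch of a loop consistent with what the tests assert (illustrative only, not the actual client code; the 300 s default is inferred from the timeout test):

import time

def wait_until_done(get_state, timeout=300, done_states=("done", "removed", "failed")):
    start = time.time()
    delay = 1
    while get_state() not in done_states:
        elapsed = time.time() - start
        if elapsed >= timeout:
            raise RuntimeError("timeout reached while waiting for the compose")
        # Clip the sleep so we never wait past the overall timeout.
        time.sleep(min(delay, timeout - elapsed))
        delay += 1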

  

      @patch("odcs.client.odcs.ComposeLog.read")

      def test_wait_for_compose_watch_logs(self, log_read, get_compose, sleep):

          get_compose.side_effect = [

-             {

-                 "state_name": "wait",

-                 "toplevel_url": "http://localhost/composes/odcs-1"

-             },

+             {"state_name": "wait", "toplevel_url": "http://localhost/composes/odcs-1"},

              {

                  "state_name": "generating",

-                 "toplevel_url": "http://localhost/composes/odcs-1"

-             },

-             {

-                 "state_name": "done",

-                 "toplevel_url": "http://localhost/composes/odcs-1"

+                 "toplevel_url": "http://localhost/composes/odcs-1",

              },

+             {"state_name": "done", "toplevel_url": "http://localhost/composes/odcs-1"},

          ]

          log_read.side_effect = [None, "line\n"]

          self.odcs.wait_for_compose(1, watch_logs=True)

  

-         self.assertEqual(sleep.mock_calls,

-                          [mock.call(10)])

-         self.assertEqual(get_compose.mock_calls,

-                          [mock.call(1)] * 3)

+         self.assertEqual(sleep.mock_calls, [mock.call(10)])

+         self.assertEqual(get_compose.mock_calls, [mock.call(1)] * 3)

          self.assertEqual(len(log_read.mock_calls), 2)

  

  

- @patch('odcs.client.odcs.requests')

+ @patch("odcs.client.odcs.requests")

  class TestComposeLog(unittest.TestCase):

      """Test ODCS.wait_for_compose"""

  

      def setUp(self):

-         compose = {

-             'toplevel_url': 'http://localhost/composes/odcs-1'

-         }

+         compose = {"toplevel_url": "http://localhost/composes/odcs-1"}

          self.compose_log = ComposeLog(compose)

  

      def test_compose_log_404(self, requests):

          requests.get.return_value.status_code = 404

          ret = self.compose_log.read()

          requests.get.assert_called_once_with(

-             'http://localhost/composes/odcs-1/pungi-stderr.log',

-             headers={'Range': 'bytes=0-'}

+             "http://localhost/composes/odcs-1/pungi-stderr.log",

+             headers={"Range": "bytes=0-"},

          )

          self.assertEqual(ret, None)

  
@@ -581,7 +581,7 @@ 

              MagicMock(status_code=200, text="line\n"),

              MagicMock(status_code=200, text="another line\n"),

              MagicMock(status_code=416, text=""),

-             MagicMock(status_code=200, text="another line\n")

+             MagicMock(status_code=200, text="another line\n"),

          ]

          requests.get.side_effect = responses

          length = 0
@@ -589,8 +589,8 @@ 

              ret = self.compose_log.read()

              self.assertEqual(ret, m.text)

              requests.get.assert_called_once_with(

-                 'http://localhost/composes/odcs-1/pungi-stderr.log',

-                 headers={'Range': 'bytes=%d-' % length}

+                 "http://localhost/composes/odcs-1/pungi-stderr.log",

+                 headers={"Range": "bytes=%d-" % length},

              )

              requests.get.reset_mock()

              length += len(ret)
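The behaviour exercised here is incremental log tailing over HTTP Range requests. A rough sketch of the pattern (illustrative; the real logic lives in odcs.client.odcs.ComposeLog):

import requests

def read_log_increment(url, offset):
    """Return (new_text, new_offset); new_text is None when the log does not exist."""
    resp = requests.get(url, headers={"Range": "bytes=%d-" % offset})
    if resp.status_code == 404:
        return None, offset  # log file not created yet
    if resp.status_code == 416:
        return "", offset  # Range Not Satisfiable: nothing new since offset
    return resp.text, offset + len(resp.text)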

file modified
+1 -1
@@ -1,1 +1,1 @@ 

- __path__ = __import__('pkgutil').extend_path(__path__, __name__)

+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)

file modified
+1 -2
@@ -51,8 +51,7 @@ 

      "pungi_compose": PungiSourceType.PUNGI_COMPOSE,

  }

  

- INVERSE_PUNGI_SOURCE_TYPE_NAMES = {

-     v: k for k, v in PUNGI_SOURCE_TYPE_NAMES.items()}

+ INVERSE_PUNGI_SOURCE_TYPE_NAMES = {v: k for k, v in PUNGI_SOURCE_TYPE_NAMES.items()}

  

  COMPOSE_STATES = {

      # Compose is waiting to be generated

file modified
+49 -45
@@ -6,18 +6,18 @@ 

  # declared properly somewhere/somehow

  confdir = path.abspath(path.dirname(__file__))

  # use parent dir as dbdir else fallback to current dir

- dbdir = path.abspath(path.join(confdir, '../..')) if confdir.endswith('conf') \

-     else confdir

+ dbdir = (

+     path.abspath(path.join(confdir, "../..")) if confdir.endswith("conf") else confdir

+ )

  

  

  class BaseConfiguration(object):

      # Make this random (used to generate session keys)

-     SECRET_KEY = '74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0'

-     SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(path.join(

-         dbdir, 'odcs.db'))

+     SECRET_KEY = "74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0"

+     SQLALCHEMY_DATABASE_URI = "sqlite:///{0}".format(path.join(dbdir, "odcs.db"))

      SQLALCHEMY_TRACK_MODIFICATIONS = False

  

-     HOST = '127.0.0.1'

+     HOST = "127.0.0.1"

      PORT = 5005

  

      DEBUG = False
@@ -26,13 +26,13 @@ 

      NET_RETRY_INTERVAL = 30

  

      # Available backends are: console, file, journal.

-     LOG_BACKEND = 'journal'

+     LOG_BACKEND = "journal"

  

      # Path to log file when LOG_BACKEND is set to "file".

-     LOG_FILE = 'odcs.log'

+     LOG_FILE = "odcs.log"

  

      # Available log levels are: debug, info, warn, error.

-     LOG_LEVEL = 'info'

+     LOG_LEVEL = "info"

  

      SSL_ENABLED = False

  
@@ -60,37 +60,37 @@ 

      #   }

      # }

      ALLOWED_CLIENTS = {

-         'groups': {},

-         'users': {},

+         "groups": {},

+         "users": {},

      }

  

      # Users in ADMINS are granted admin permission.

      ADMINS = {

-         'groups': [],

-         'users': [],

+         "groups": [],

+         "users": [],

      }

  

      # OIDC base namespace

      # See also section pagure.io/odcs in

      # https://fedoraproject.org/wiki/Infrastructure/Authentication

-     OIDC_BASE_NAMESPACE = 'https://pagure.io/odcs/'

+     OIDC_BASE_NAMESPACE = "https://pagure.io/odcs/"

  

      # Select which authentication backend to work with. There are 3 choices

      # noauth: no authentication is enabled. Useful for development particularly.

      # kerberos: Kerberos authentication is enabled.

      # openidc: OpenIDC authentication is enabled.

-     AUTH_BACKEND = ''

+     AUTH_BACKEND = ""

  

      # Used for Kerberos authentication and to query user's groups.

      # Format: ldap://hostname[:port]

      # For example: ldap://ldap.example.com/

-     AUTH_LDAP_SERVER = ''

+     AUTH_LDAP_SERVER = ""

  

      # Group base to query groups from LDAP server.

      # Generally, it would be, for example, ou=groups,dc=example,dc=com

-     AUTH_LDAP_GROUP_BASE = ''

+     AUTH_LDAP_GROUP_BASE = ""

  

-     AUTH_OPENIDC_USERINFO_URI = 'https://id.fedoraproject.org/openidc/UserInfo'

+     AUTH_OPENIDC_USERINFO_URI = "https://id.fedoraproject.org/openidc/UserInfo"

  

      # Scope requested from Fedora Infra for permission of submitting request to

      # run a new compose.
@@ -103,25 +103,25 @@ 

      # https://pagure.io/odcs/renew-compose

      # https://pagure.io/odcs/delete-compose

      AUTH_OPENIDC_REQUIRED_SCOPES = [

-         'openid',

-         'https://id.fedoraproject.org/scope/groups',

+         "openid",

+         "https://id.fedoraproject.org/scope/groups",

      ]

  

      # Select the backend messages will be sent to. Currently, umb is supported,

      # which means the Unified Message Bus.

-     MESSAGING_BACKEND = ''  # fedora-messaging or umb

+     MESSAGING_BACKEND = ""  # fedora-messaging or umb

  

      # List of broker URLs. Each of them is a string consisting of a domain and

      # an optional port.

      MESSAGING_BROKER_URLS = []

  

      # Path to certificate file used to authenticate ODCS by messaging broker.

-     MESSAGING_CERT_FILE = ''

+     MESSAGING_CERT_FILE = ""

  

      # Path to private key file used to authenticate ODCS by messaging broker.

-     MESSAGING_KEY_FILE = ''

+     MESSAGING_KEY_FILE = ""

  

-     MESSAGING_CA_CERT = ''

+     MESSAGING_CA_CERT = ""

  

      # The MESSAGING_TOPIC is used as the topic for messages sent when the

      # compose state changes.
@@ -130,8 +130,8 @@ 

      # composes.

      # For umb, it is the ActiveMQ virtual topic e.g.

      # VirtualTopic.eng.odcs.state.changed.

-     MESSAGING_TOPIC = ''

-     INTERNAL_MESSAGING_TOPIC = ''

+     MESSAGING_TOPIC = ""

+     INTERNAL_MESSAGING_TOPIC = ""

  

      # Definitions of raw Pungi configs for "raw_config" source_type.

      # RAW_CONFIG_URLS = {
@@ -160,8 +160,8 @@ 

  

  class DevConfiguration(BaseConfiguration):

      DEBUG = True

-     LOG_BACKEND = 'console'

-     LOG_LEVEL = 'debug'

+     LOG_BACKEND = "console"

+     LOG_LEVEL = "debug"

  

      # Global network-related values, in seconds

      NET_TIMEOUT = 5
@@ -171,27 +171,29 @@ 

          makedirs(TARGET_DIR, mode=0o775)

      except OSError as ex:

          if ex.errno != errno.EEXIST:

-             raise RuntimeError("Can't create compose target dir %s: %s" % (TARGET_DIR, ex.strerror))

+             raise RuntimeError(

+                 "Can't create compose target dir %s: %s" % (TARGET_DIR, ex.strerror)

+             )

  

-     PUNGI_CONF_PATH = path.join(confdir, 'pungi.conf')

-     AUTH_BACKEND = 'noauth'

-     AUTH_OPENIDC_USERINFO_URI = 'https://iddev.fedorainfracloud.org/openidc/UserInfo'

+     PUNGI_CONF_PATH = path.join(confdir, "pungi.conf")

+     AUTH_BACKEND = "noauth"

+     AUTH_OPENIDC_USERINFO_URI = "https://iddev.fedorainfracloud.org/openidc/UserInfo"

  

-     KOJI_PROFILE = 'stg'

+     KOJI_PROFILE = "stg"

  

-     RAW_CONFIG_WRAPPER_CONF_PATH = path.join(confdir, 'raw_config_wrapper.conf')

+     RAW_CONFIG_WRAPPER_CONF_PATH = path.join(confdir, "raw_config_wrapper.conf")

  

  

  class TestConfiguration(BaseConfiguration):

-     LOG_BACKEND = 'console'

-     LOG_LEVEL = 'debug'

+     LOG_BACKEND = "console"

+     LOG_LEVEL = "debug"

      DEBUG = True

  

      # Use in-memory sqlite db to make tests fast.

-     SQLALCHEMY_DATABASE_URI = 'sqlite://'

+     SQLALCHEMY_DATABASE_URI = "sqlite://"

  

-     PUNGI_CONF_PATH = path.join(confdir, 'pungi.conf')

-     RAW_CONFIG_WRAPPER_CONF_PATH = path.join(confdir, 'raw_config_wrapper.conf')

+     PUNGI_CONF_PATH = path.join(confdir, "pungi.conf")

+     RAW_CONFIG_WRAPPER_CONF_PATH = path.join(confdir, "raw_config_wrapper.conf")

      # Global network-related values, in seconds

      NET_TIMEOUT = 0

      NET_RETRY_INTERVAL = 0
@@ -200,14 +202,16 @@ 

          makedirs(TARGET_DIR, mode=0o775)

      except OSError as ex:

          if ex.errno != errno.EEXIST:

-             raise RuntimeError("Can't create compose target dir %s: %s" % (TARGET_DIR, ex.strerror))

+             raise RuntimeError(

+                 "Can't create compose target dir %s: %s" % (TARGET_DIR, ex.strerror)

+             )

  

-     AUTH_BACKEND = 'noauth'

-     AUTH_LDAP_SERVER = 'ldap://ldap.example.com'

-     AUTH_LDAP_GROUP_BASE = 'ou=groups,dc=example,dc=com'

+     AUTH_BACKEND = "noauth"

+     AUTH_LDAP_SERVER = "ldap://ldap.example.com"

+     AUTH_LDAP_GROUP_BASE = "ou=groups,dc=example,dc=com"

  

-     MESSAGING_BACKEND = 'rhmsg'

-     KOJI_PROFILE = 'koji'

+     MESSAGING_BACKEND = "rhmsg"

+     KOJI_PROFILE = "koji"

  

  

  class ProdConfiguration(BaseConfiguration):
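As an aside, the EEXIST-guarded makedirs pattern used twice in this file predates the simpler Python 3 form; the same effect can be had with exist_ok (a sketch, not part of this patch):

from os import makedirs

makedirs(TARGET_DIR, mode=0o775, exist_ok=True)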

@@ -16,6 +16,7 @@ 

      """"

      Raised when compose check fails.

      """

+ 

      pass

  

  
@@ -25,7 +26,10 @@ 

  

      This is not a real Compose CI, but rather a basic sanity check.

      """

-     def __init__(self, path, target, allow_unsigned=False, allow_finished_incomplete=False):

+ 

+     def __init__(

+         self, path, target, allow_unsigned=False, allow_finished_incomplete=False

+     ):

          """

          Creates a new ComposeCheck instance.

  
@@ -56,7 +60,9 @@ 

          with open(status_path, "r") as f:

              status = f.readline()[:-1]

              if status not in allowed_statuses:

-                 err_msg = 'Compose is not in %s status.' % (" or ".join(allowed_statuses))

+                 err_msg = "Compose is not in %s status." % (

+                     " or ".join(allowed_statuses)

+                 )

                  raise ComposeCheckError(err_msg)

  

      def check_compose_info(self):
@@ -100,7 +106,6 @@ 

          target_dirname = os.path.dirname(self.target)

          while not os.path.exists(target_dirname):

              target_dirname = os.path.dirname(target_dirname)

-         target_stat = os.stat(target_dirname)

  

          for root, dirs, files in os.walk(self.path):

              for p in dirs + files:
@@ -110,9 +115,11 @@ 

                      continue

  

                  real_path = os.readlink(path)

-                 abspath = os.path.normpath(os.path.join(os.path.dirname(path), real_path))

+                 abspath = os.path.normpath(

+                     os.path.join(os.path.dirname(path), real_path)

+                 )

                  try:

-                     abspath_stat = os.stat(abspath)

+                     os.stat(abspath)

                  except Exception as e:

                      err_msg = "Symlink cannot be resolved: %s: %s." % (path, e)

                      raise ComposeCheckError(err_msg)
@@ -131,6 +138,7 @@ 

      """

      Contains methods and data to promote a compose.

      """

+ 

      def __init__(self, compose, target):

          """

          Creates a new ComposePromotion instance.
@@ -171,7 +179,9 @@ 

          print("Replacing %d symlinks with hardlinks." % len(self.symlinks))

          for symlink, hardlink_path in self.symlinks:

              real_path = os.readlink(symlink)

-             abspath = os.path.normpath(os.path.join(os.path.dirname(symlink), real_path))

+             abspath = os.path.normpath(

+                 os.path.join(os.path.dirname(symlink), real_path)

+             )

              try:

                  os.link(abspath, hardlink_path)

              except OSError as ex:
@@ -192,12 +202,19 @@ 

      parser = argparse.ArgumentParser(description="Promote ODCS compose.")

      parser.add_argument("compose", help="Path to compose to promote.")

      parser.add_argument("target", help="Path to target location")

-     parser.add_argument("--allow-unsigned", action="store_true",

-                         help="Allow unsigned RPMs.")

-     parser.add_argument("--allow-finished-incomplete", action="store_true",

-                         help="Allow compose in FINISHED_INCOMPLETE state.")

-     parser.add_argument("--no-checks", action="store_true",

-                     help="WARN: Promote the compose without any checks.")

+     parser.add_argument(

+         "--allow-unsigned", action="store_true", help="Allow unsigned RPMs."

+     )

+     parser.add_argument(

+         "--allow-finished-incomplete",

+         action="store_true",

+         help="Allow compose in FINISHED_INCOMPLETE state.",

+     )

+     parser.add_argument(

+         "--no-checks",

+         action="store_true",

+         help="WARN: Promote the compose without any checks.",

+     )

      args = parser.parse_args()

  

      args.compose = os.path.abspath(args.compose)
@@ -205,7 +222,11 @@ 

  

      if not args.no_checks:

          compose_check = ComposeCheck(

-             args.compose, args.target, args.allow_unsigned, args.allow_finished_incomplete)

+             args.compose,

+             args.target,

+             args.allow_unsigned,

+             args.allow_finished_incomplete,

+         )

          try:

              compose_check.run()

          except ComposeCheckError as e:
@@ -215,4 +236,3 @@ 

      print("Promoting compose")

      compose_promotion = ComposePromotion(args.compose, args.target)

      compose_promotion.promote()

- 
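For reference, a hypothetical invocation of this promotion script (the script path is an assumption; adjust it to wherever the tool is installed): python promote_compose.py /path/to/compose /path/to/target --allow-unsigned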

file modified
+4 -3
@@ -1,6 +1,7 @@ 

- #-*- coding: utf-8 -*-

+ # -*- coding: utf-8 -*-

  

  import logging

- logging.basicConfig(level='DEBUG')

  

- from odcs.server import app as application

+ logging.basicConfig(level="DEBUG")

+ 

+ from odcs.server import app as application  # noqa: E402, F401

@@ -20,6 +20,7 @@ 

      Context manager for tempfile.mkdtemp() so it's usable with the "with"

      statement.

      """

+ 

      def __enter__(self):

          self.name = tempfile.mkdtemp()

          return self.name
@@ -30,25 +31,25 @@ 

  

  def get_oidc_token():

      if "stg" in odcs_api_url:

-         id_provider = 'https://id.stg.fedoraproject.org/openidc/'

+         id_provider = "https://id.stg.fedoraproject.org/openidc/"

      else:

-         id_provider = 'https://id.fedoraproject.org/openidc/'

+         id_provider = "https://id.fedoraproject.org/openidc/"

  

      # Get the auth token using the OpenID client.

      oidc = openidc_client.OpenIDCClient(

-         'odcs',

+         "odcs",

          id_provider,

-         {'Token': 'Token', 'Authorization': 'Authorization'},

-         'odcs-authorizer',

-         'notsecret',

+         {"Token": "Token", "Authorization": "Authorization"},

+         "odcs-authorizer",

+         "notsecret",

      )

  

      scopes = [

-         'openid',

-         'https://id.fedoraproject.org/scope/groups',

-         'https://pagure.io/odcs/new-compose',

-         'https://pagure.io/odcs/renew-compose',

-         'https://pagure.io/odcs/delete-compose',

+         "openid",

+         "https://id.fedoraproject.org/scope/groups",

+         "https://pagure.io/odcs/new-compose",

+         "https://pagure.io/odcs/renew-compose",

+         "https://pagure.io/odcs/delete-compose",

      ]

      try:

          token = oidc.get_token(scopes, new_token=True)
@@ -78,11 +79,13 @@ 

          base.reset(repos=True, goal=True, sack=True)

  

          # adding a new repo requires an id, a conf object, and a baseurl

-         base.repos.add_new_repo('my_test', conf, baseurl=[repo_url])

+         base.repos.add_new_repo("my_test", conf, baseurl=[repo_url])

          base.fill_sack(load_system_repo=False)

  

          # Return available packages.

-         return [x.name for x in base.sack.query(flags=hawkey.IGNORE_EXCLUDES).available()]

+         return [

+             x.name for x in base.sack.query(flags=hawkey.IGNORE_EXCLUDES).available()

+         ]

  

  

  def check_compose(compose, source_type, source, packages, flags, arches=None):
@@ -146,7 +149,7 @@ 

          in the format of "[0-9]+[s|S|m|M|h|H|d|D]", like "10m".

      """

      try:

-         regex = re.compile(r'(?P<num>\d+)[s|S|m|M|h|H|d|D]')

+         regex = re.compile(r"(?P<num>\d+)[s|S|m|M|h|H|d|D]")

          m = regex.match(time_threshold)

          num = int(m.group("num"))

      except Exception:
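One subtlety worth flagging: inside a regex character class, "|" is a literal pipe rather than alternation, so the pattern above also accepts strings like "10|". The intended class is just the unit letters; a tightened sketch:

import re

# Accepts "10m", "5s", "2D", ...; the unit letters are matched case-insensitively.
regex = re.compile(r"(?P<num>\d+)[smhd]", re.IGNORECASE)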
@@ -165,21 +168,34 @@ 

      start_time = datetime.strptime(compose["time_submitted"], time_format)

      end_time = datetime.strptime(compose["time_done"], time_format)

      spent_time = end_time - start_time

-     print("Time spent for generating compose (%s): %ss" % (compose["id"],

-           spent_time.total_seconds()))

-     assert spent_time <= threshold, \

-         "Spent time (%ss) over threshold (%s) for generating compose %s." % (

-                 spent_time.total_seconds(), time_threshold, compose["id"])

+     print(

+         "Time spent for generating compose (%s): %ss"

+         % (compose["id"], spent_time.total_seconds())

+     )

+     assert spent_time <= threshold, (

+         "Spent time (%ss) over threshold (%s) for generating compose %s."

+         % (spent_time.total_seconds(), time_threshold, compose["id"])

+     )

  

  

- def check_new_compose(source_type, source, packages, flags,

-                       sigkeys=None, arches=None, expected_state_reason=None,

-                       expected_packages=None, **kwargs):

+ def check_new_compose(

+     source_type,

+     source,

+     packages,

+     flags,

+     sigkeys=None,

+     arches=None,

+     expected_state_reason=None,

+     expected_packages=None,

+     **kwargs

+ ):

      """

      Submits new compose and checks the result.

      """

-     print("Submitting new compose request: %s %s, %r %r" % (

-           source_type, source, packages, flags))

+     print(

+         "Submitting new compose request: %s %s, %r %r"

+         % (source_type, source, packages, flags)

+     )

  

      try:

          compose = client.new_compose(
@@ -211,8 +227,7 @@ 

      return compose["id"]

  

  

- def check_renew_compose(compose_id, source_type, source, packages,

-                         flags, arches=None):

+ def check_renew_compose(compose_id, source_type, source, packages, flags, arches=None):

      """

      Renews the compose and checks the compose is renewed properly.

      """
@@ -239,7 +254,8 @@ 

          now = datetime.utcnow()

          compose = client.get_compose(compose_id)

          time_to_expire = datetime.strptime(

-             compose["time_to_expire"], "%Y-%m-%dT%H:%M:%SZ")

+             compose["time_to_expire"], "%Y-%m-%dT%H:%M:%SZ"

+         )

          if time_to_expire < now:

              break

          time.sleep(1)
@@ -256,67 +272,132 @@ 

      if with_large_tag_compose:

          # test with a compose with many more packages

          compose_id = check_new_compose(

-             "tag", "rhos-13.0-rhel-7-container-build", [], [],

+             "tag",

+             "rhos-13.0-rhel-7-container-build",

+             [],

+             [],

              sigkeys=["37017186", "FD431D51", "DB42A60E", ""],

-             arches=["x86_64", "ppc64le"])

+             arches=["x86_64", "ppc64le"],

+         )

          compose = client.get_compose(compose_id)

          check_compose_time(compose, "10m")

          check_delete_compose(compose_id)

-         check_renew_compose(compose_id, "tag", "rhos-13.0-rhel-7-container-build",

-                             [], [], arches=["x86_64", "ppc64le"])

+         check_renew_compose(

+             compose_id,

+             "tag",

+             "rhos-13.0-rhel-7-container-build",

+             [],

+             [],

+             arches=["x86_64", "ppc64le"],

+         )

      else:

          # this is a much smaller compose

          compose_id = check_new_compose(

-             "tag", "cf-1.0-rhel-5", ["gofer-package"], ["no_deps"],

-             arches=["x86_64", "ppc64"])

+             "tag",

+             "cf-1.0-rhel-5",

+             ["gofer-package"],

+             ["no_deps"],

+             arches=["x86_64", "ppc64"],

+         )

          compose = client.get_compose(compose_id)

          check_compose_time(compose, "5m")

          check_delete_compose(compose_id)

-         check_renew_compose(compose_id, "tag", "cf-1.0-rhel-5",

-                             ["gofer-package"], ["no_deps"], ["x86_64", "ppc64"])

+         check_renew_compose(

+             compose_id,

+             "tag",

+             "cf-1.0-rhel-5",

+             ["gofer-package"],

+             ["no_deps"],

+             ["x86_64", "ppc64"],

+         )

  

      # Check "tag" with "deps".

      check_new_compose("tag", "cf-1.0-rhel-5", ["gofer"], [], sigkeys=[""])

  

      # Check "tag" without "packages" - all packages in tag should be included.

      check_new_compose(

-         "tag", "cf-1.0-rhel-5", [], [],

+         "tag",

+         "cf-1.0-rhel-5",

+         [],

+         [],

          sigkeys=[""],

          expected_packages=[

-             'PyPAM', 'aeolus-audrey-agent', 'facter', 'gofer',

-             'gofer-package', 'gofer-system', 'gofer-virt', 'gofer-watchdog',

-             'help2man', 'katello-agent', 'libdnet', 'libdnet-devel',

-             'libdnet-progs', 'open-vm-tools', 'open-vm-tools-devel',

-             'open-vm-tools-libs', 'open-vm-toolsd', 'python-argparse',

-             'python-gofer', 'python-hashlib', 'python-httplib2',

-             'python-oauth2', 'python-qpid', 'python-saslwrapper',

-             'python-setuptools', 'python-ssl', 'python-uuid', 'rhev-agent',

-             'ruby-gofer', 'ruby-saslwrapper', 'saslwrapper',

-             'saslwrapper-devel'])

+             "PyPAM",

+             "aeolus-audrey-agent",

+             "facter",

+             "gofer",

+             "gofer-package",

+             "gofer-system",

+             "gofer-virt",

+             "gofer-watchdog",

+             "help2man",

+             "katello-agent",

+             "libdnet",

+             "libdnet-devel",

+             "libdnet-progs",

+             "open-vm-tools",

+             "open-vm-tools-devel",

+             "open-vm-tools-libs",

+             "open-vm-toolsd",

+             "python-argparse",

+             "python-gofer",

+             "python-hashlib",

+             "python-httplib2",

+             "python-oauth2",

+             "python-qpid",

+             "python-saslwrapper",

+             "python-setuptools",

+             "python-ssl",

+             "python-uuid",

+             "rhev-agent",

+             "ruby-gofer",

+             "ruby-saslwrapper",

+             "saslwrapper",

+             "saslwrapper-devel",

+         ],

+     )

  

      # Check unknown "tag".

      check_new_compose(

-         "tag", "unknown-tag", ["gofer-package"], [],

-         expected_state_reason="Unknown Koji tag")

+         "tag",

+         "unknown-tag",

+         ["gofer-package"],

+         [],

+         expected_state_reason="Unknown Koji tag",

+     )

  

      # Check "tag" with additional builds.

      compose_id = check_new_compose(

-         "tag", "cf-1.0-rhel-5", ["gofer-package", "tar"], ["no_deps"],

+         "tag",

+         "cf-1.0-rhel-5",

+         ["gofer-package", "tar"],

+         ["no_deps"],

          sigkeys=[""],

-         arches=["x86_64", "ppc64"], builds=["tar-1.26-29.el7"])

+         arches=["x86_64", "ppc64"],

+         builds=["tar-1.26-29.el7"],

+     )

  

      # Check "build".

      compose_id = check_new_compose(

-         "build", "", ["tar"], ["no_deps"],

-         arches=["x86_64", "ppc64"], builds=["tar-1.26-29.el7"])

+         "build",

+         "",

+         ["tar"],

+         ["no_deps"],

+         arches=["x86_64", "ppc64"],

+         builds=["tar-1.26-29.el7"],

+     )

  

      # Check "pulp".

      check_new_compose("pulp", "rhel-7-server-rpms rhel-server-rhscl-7-rpms", [], [])

  

      # Check unknown "pulp" content_set.

      check_new_compose(

-         "pulp", "rhel-7-server-rpms-unknown", [], [],

-         expected_state_reason="Failed to find")

+         "pulp",

+         "rhel-7-server-rpms-unknown",

+         [],

+         [],

+         expected_state_reason="Failed to find",

+     )

  

      # Check "module".

      check_new_compose("module", "postgresql:10", [], ["no_deps"], [""])
@@ -324,12 +405,11 @@ 

  

  if __name__ == "__main__":

      parser = argparse.ArgumentParser(description="Test ODCS deployment.")

-     parser.add_argument("odcs_api_url",

-                         help="URL of the ODCS instance to test")

-     parser.add_argument("profile",

-                         help="can be either \"redhat\" or \"fedora\"")

-     parser.add_argument("--fast-check", action="store_true",

-                         help="perform just a single compose check")

+     parser.add_argument("odcs_api_url", help="URL of the ODCS instance to test")

+     parser.add_argument("profile", help='can be either "redhat" or "fedora"')

+     parser.add_argument(

+         "--fast-check", action="store_true", help="perform just a single compose check"

+     )

      args = parser.parse_args()

  

      odcs_api_url = args.odcs_api_url
@@ -347,24 +427,22 @@ 

          sys.exit(2)

  

      client = odcs.client.odcs.ODCS(

-         odcs_api_url,

-         auth_mech=auth_mech,

-         openidc_token=token,

+         odcs_api_url, auth_mech=auth_mech, openidc_token=token,

      )

  

      if profile == "redhat":

          if fast_check:

              compose_id = check_new_compose(

-                 "pulp", "rhel-7-server-rpms rhel-server-rhscl-7-rpms", [], [])

+                 "pulp", "rhel-7-server-rpms rhel-server-rhscl-7-rpms", [], []

+             )

              sys.exit(0) if compose_id else sys.exit(1)

  

          with_large_tag_compose = True

-         if '.dev.' in odcs_api_url or '.qe.' in odcs_api_url:

+         if ".dev." in odcs_api_url or ".qe." in odcs_api_url:

              with_large_tag_compose = False

          check_redhat_deployment(with_large_tag_compose=with_large_tag_compose)

      else:

          if fast_check:

              print("Ignoring --fast-check option. Applicable only for redhat profile.")

-         compose_id = check_new_compose(

-             "module", "testmodule-master", [], ["no_deps"])

+         compose_id = check_new_compose("module", "testmodule-master", [], ["no_deps"])

          sys.exit(0) if compose_id else sys.exit(1)

file modified
+1 -1
@@ -1,1 +1,1 @@ 

- __path__ = __import__('pkgutil').extend_path(__path__, __name__)

+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)

file modified
+13 -15
@@ -36,9 +36,9 @@ 

  import pkg_resources

  

  try:

-     version = pkg_resources.get_distribution('odcs').version

+     version = pkg_resources.get_distribution("odcs").version

  except pkg_resources.DistributionNotFound:

-     version = 'unknown'

+     version = "unknown"

  

  app = Flask(__name__)

  app.wsgi_app = ReverseProxy(app.wsgi_app)
@@ -53,17 +53,15 @@ 

  login_manager = LoginManager()

  login_manager.init_app(app)

  

- from odcs.server import views # noqa

+ from odcs.server import views  # noqa

+ 

+ from odcs.server.auth import init_auth  # noqa

  

- from odcs.server.auth import init_auth # noqa

  init_auth(login_manager, conf.auth_backend)

  

  

  def json_error(status, error, message):

-     response = jsonify(

-         {'status': status,

-          'error': error,

-          'message': message})

+     response = jsonify({"status": status, "error": error, "message": message})

      response.status_code = status

      return response

  
@@ -71,35 +69,35 @@ 

  @app.errorhandler(NotFound)

  def notfound_error(e):

      """Flask error handler for NotFound exceptions"""

-     return json_error(404, 'Not Found', e.args[0])

+     return json_error(404, "Not Found", e.args[0])

  

  

  @app.errorhandler(Unauthorized)

  def unauthorized_error(e):

      """Flask error handler for Unauthorized exceptions"""

-     return json_error(401, 'Unauthorized', e.description)

+     return json_error(401, "Unauthorized", e.description)

  

  

  @app.errorhandler(Forbidden)

  def forbidden_error(e):

      """Flask error handler for Forbidden exceptions"""

-     return json_error(403, 'Forbidden', e.args[0])

+     return json_error(403, "Forbidden", e.args[0])

  

  

  @app.errorhandler(BadRequest)

  def badrequest_error(e):

      """Flask error handler for RuntimeError exceptions"""

-     return json_error(400, 'Bad Request', e.get_description())

+     return json_error(400, "Bad Request", e.get_description())

  

  

  @app.errorhandler(ValueError)

  def validationerror_error(e):

      """Flask error handler for ValueError exceptions"""

-     return json_error(400, 'Bad Request', str(e))

+     return json_error(400, "Bad Request", str(e))

  

  

  @app.errorhandler(Exception)

  def internal_server_error(e):

      """Flask error handler for RuntimeError exceptions"""

-     log.exception('Internal server error: %s', e)

-     return json_error(500, 'Internal Server Error', str(e))

+     log.exception("Internal server error: %s", e)

+     return json_error(500, "Internal Server Error", str(e))

file modified
+93 -45
@@ -27,7 +27,10 @@ 

  from odcs.server import conf

  from odcs.server.errors import Forbidden

  from odcs.common.types import (

-     COMPOSE_RESULTS, COMPOSE_FLAGS, INVERSE_PUNGI_SOURCE_TYPE_NAMES)

+     COMPOSE_RESULTS,

+     COMPOSE_FLAGS,

+     INVERSE_PUNGI_SOURCE_TYPE_NAMES,

+ )

  

  

  def _set_default_client_allowed_attrs(ret_attrs, attrs):
@@ -106,7 +109,7 @@ 

      The decision whether the user is allowed or not is done based on

      conf.allowed_clients value.

      """

-     if conf.auth_backend == 'noauth':

+     if conf.auth_backend == "noauth":

          return

  

      errors = set()
@@ -117,7 +120,8 @@ 

                  # This should not happen, but be defensive in this part of the code...

                  errors.add(

                      "User %s not allowed to operate with compose with %s=%r."

-                     % (flask.g.user.username, name, values))

+                     % (flask.g.user.username, name, values)

+                 )

                  continue

  

              # Convert integers from db format to string list.
@@ -131,7 +135,9 @@ 

                  # The default conf.target_dir is always allowed.

                  if values == conf.target_dir:

                      continue

-                 inverse_extra_target_dirs = {v: k for k, v in conf.extra_target_dirs.items()}

+                 inverse_extra_target_dirs = {

+                     v: k for k, v in conf.extra_target_dirs.items()

+                 }

                  values = inverse_extra_target_dirs[values]

  

              if type(values) == int:
@@ -147,11 +153,13 @@ 

  

              for value in values:

                  allowed_values = attrs[name]

-                 if ((not allowed_values or value not in allowed_values) and

-                         allowed_values != [""]):

+                 if (

+                     not allowed_values or value not in allowed_values

+                 ) and allowed_values != [""]:

                      errors.add(

                          "User %s not allowed to operate with compose with %s=%s."

-                         % (flask.g.user.username, name, value))

+                         % (flask.g.user.username, name, value)

+                     )

                      found_error = True

                      break

          if not found_error:
@@ -160,7 +168,8 @@ 

          raise Forbidden(" ".join(list(errors)))

      else:

          raise Forbidden(

-             "User %s not allowed to operate with any compose." % flask.g.user.username)

+             "User %s not allowed to operate with any compose." % flask.g.user.username

+         )

  

  

  def validate_json_data(dict_or_list, level=0, last_dict_key=None):
@@ -181,8 +190,7 @@ 

              # Allow only dict with "source" key name in first level of

              # json object.

              if level != 0 or k not in ["source"]:

-                 raise ValueError(

-                     "Only 'source' key is allowed to contain dict.")

+                 raise ValueError("Only 'source' key is allowed to contain dict.")

              validate_json_data(v, level + 1, k)

          elif isinstance(v, list):

              validate_json_data(v, level + 1, k)
@@ -191,12 +199,12 @@ 

              # not exploitable.

              if last_dict_key in ["packages"]:

                  continue

-             allowed_chars = [' ', '-', '/', '_', '.', ':', '#', '+', '?', '$',

-                              '~']

+             allowed_chars = [" ", "-", "/", "_", ".", ":", "#", "+", "?", "$", "~"]

              if not all(c.isalnum() or c in allowed_chars for c in v):

                  raise ValueError(

                      "Only alphanumerical characters and %r characters "

-                     "are allowed in ODCS input variables" % (allowed_chars))

+                     "are allowed in ODCS input variables" % (allowed_chars)

+                 )

          elif isinstance(v, (int, float)):

              # Allow int, float and also bool, because that's subclass of int.

              continue
@@ -204,7 +212,8 @@ 

              raise ValueError(

                  "Only dict, list, str, unicode, int, float and bool types "

                  "are allowed in ODCS input variables, but '%s' has '%s' "

-                 "type" % (k, type(v)))

+                 "type" % (k, type(v))

+             )
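Illustrative behaviour of this validator, based on the checks visible above (values are made up):

validate_json_data({"source": {"type": "tag", "source": "f30"}})  # passes
validate_json_data({"evil": {}})  # ValueError: only 'source' may contain a dict
validate_json_data({"source": "rm -rf /;"})  # ValueError: ';' is not an allowed character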

  

  

  def pagination_metadata(p_query, request_args):
@@ -221,35 +230,52 @@ 

      # Remove pagination related args because those are handled elsewhere

      # Also, remove any args that url_for accepts in case the user entered

      # those in

-     for key in ['page', 'per_page', 'endpoint']:

+     for key in ["page", "per_page", "endpoint"]:

          if key in request_args_wo_page:

              request_args_wo_page.pop(key)

      for key in request_args:

-         if key.startswith('_'):

+         if key.startswith("_"):

              request_args_wo_page.pop(key)

      pagination_data = {

-         'page': p_query.page,

-         'pages': p_query.pages,

-         'per_page': p_query.per_page,

-         'prev': None,

-         'next': None,

-         'total': p_query.total,

-         'first': url_for(request.endpoint, page=1, per_page=p_query.per_page,

-                          _external=True, **request_args_wo_page),

-         'last': url_for(request.endpoint, page=p_query.pages,

-                         per_page=p_query.per_page, _external=True,

-                         **request_args_wo_page)

+         "page": p_query.page,

+         "pages": p_query.pages,

+         "per_page": p_query.per_page,

+         "prev": None,

+         "next": None,

+         "total": p_query.total,

+         "first": url_for(

+             request.endpoint,

+             page=1,

+             per_page=p_query.per_page,

+             _external=True,

+             **request_args_wo_page

+         ),

+         "last": url_for(

+             request.endpoint,

+             page=p_query.pages,

+             per_page=p_query.per_page,

+             _external=True,

+             **request_args_wo_page

+         ),

      }

  

      if p_query.has_prev:

-         pagination_data['prev'] = url_for(request.endpoint, page=p_query.prev_num,

-                                           per_page=p_query.per_page, _external=True,

-                                           **request_args_wo_page)

+         pagination_data["prev"] = url_for(

+             request.endpoint,

+             page=p_query.prev_num,

+             per_page=p_query.per_page,

+             _external=True,

+             **request_args_wo_page

+         )

  

      if p_query.has_next:

-         pagination_data['next'] = url_for(request.endpoint, page=p_query.next_num,

-                                           per_page=p_query.per_page, _external=True,

-                                           **request_args_wo_page)

+         pagination_data["next"] = url_for(

+             request.endpoint,

+             page=p_query.next_num,

+             per_page=p_query.per_page,

+             _external=True,

+             **request_args_wo_page

+         )

  

      return pagination_data
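The resulting metadata looks like this (illustrative values; the endpoint URL is made up):

{
    "page": 2,
    "pages": 5,
    "per_page": 10,
    "total": 42,
    "first": "https://odcs.example.com/api/1/composes/?page=1&per_page=10",
    "last": "https://odcs.example.com/api/1/composes/?page=5&per_page=10",
    "prev": "https://odcs.example.com/api/1/composes/?page=1&per_page=10",
    "next": "https://odcs.example.com/api/1/composes/?page=3&per_page=10",
}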

  
@@ -265,7 +291,7 @@ 

      If "order_by" argument starts with minus sign ('-'), the descending order

      is used.

      """

-     order_by = flask_request.args.get('order_by', default_key, type=str)

+     order_by = flask_request.args.get("order_by", default_key, type=str)

      if order_by and len(order_by) > 1 and order_by[0] == "-":

          order_asc = False

          order_by = order_by[1:]
@@ -274,8 +300,9 @@ 

  

      if order_by not in allowed_keys:

          raise ValueError(

-             'An invalid order_by key was supplied, allowed keys are: '

-             '%r' % allowed_keys)

+             "An invalid order_by key was suplied, allowed keys are: "

+             "%r" % allowed_keys

+         )

  

      order_by_attr = getattr(base_class, order_by)

      if not order_asc:
@@ -291,8 +318,16 @@ 

      """

      search_query = dict()

  

-     for key in ['owner', 'source_type', 'source', 'state', 'koji_task_id',

-                 'pungi_compose_id', 'compose_type', 'label']:

+     for key in [

+         "owner",

+         "source_type",

+         "source",

+         "state",

+         "koji_task_id",

+         "pungi_compose_id",

+         "compose_type",

+         "label",

+     ]:

          if flask_request.args.get(key, None):

              search_query[key] = flask_request.args[key]

  
@@ -301,11 +336,24 @@ 

      if search_query:

          query = query.filter_by(**search_query)

  

-     query = _order_by(flask_request, query, Compose,

-                       ["id", "owner", "source_Type", "koji_event",

-                        "state", "time_to_expire", "time_submitted",

-                        "time_done", "time_removed"], "-id")

- 

-     page = flask_request.args.get('page', 1, type=int)

-     per_page = flask_request.args.get('per_page', 10, type=int)

+     query = _order_by(

+         flask_request,

+         query,

+         Compose,

+         [

+             "id",

+             "owner",

+             "source_Type",

+             "koji_event",

+             "state",

+             "time_to_expire",

+             "time_submitted",

+             "time_done",

+             "time_removed",

+         ],

+         "-id",

+     )

+ 

+     page = flask_request.args.get("page", 1, type=int)

+     per_page = flask_request.args.get("per_page", 10, type=int)

      return query.paginate(page, per_page, False)

file modified
+77 -53
@@ -46,12 +46,16 @@ 

      """

      errors = []

      if not conf.auth_ldap_server:

-         errors.append("kerberos authentication enabled with no LDAP server configured, "

-                       "check AUTH_LDAP_SERVER in your config.")

+         errors.append(

+             "kerberos authentication enabled with no LDAP server configured, "

+             "check AUTH_LDAP_SERVER in your config."

+         )

  

      if not conf.auth_ldap_group_base:

-         errors.append("kerberos authentication enabled with no LDAP group base configured, "

-                       "check AUTH_LDAP_GROUP_BASE in your config.")

+         errors.append(

+             "kerberos authentication enabled with no LDAP group base configured, "

+             "check AUTH_LDAP_GROUP_BASE in your config."

+         )

  

      if errors:

          for error in errors:
@@ -66,11 +70,11 @@ 

      REMOTE_USER needs to be set in environment variable, that is set by

      frontend Apache authentication module.

      """

-     remote_user = request.environ.get('REMOTE_USER')

+     remote_user = request.environ.get("REMOTE_USER")

      if not remote_user:

-         raise Unauthorized('REMOTE_USER is not present in request.')

+         raise Unauthorized("REMOTE_USER is not present in request.")

  

-     username, realm = remote_user.split('@')

+     username, realm = remote_user.split("@")

  

      user = User.find_user_by_name(username)

      if not user:
@@ -79,8 +83,11 @@ 

      try:

          groups = query_ldap_groups(username)

      except ldap.SERVER_DOWN as e:

-         log.error('Cannot query groups of %s from LDAP. Error: %s',

-                   username, e.args[0]['desc'])

+         log.error(

+             "Cannot query groups of %s from LDAP. Error: %s",

+             username,

+             e.args[0]["desc"],

+         )

          groups = []

  

      g.groups = groups
@@ -96,13 +103,15 @@ 

      SSL_CLIENT_VERIFY and SSL_CLIENT_S_DN needs to be set in

      request.environ. This is set by frontend httpd mod_ssl module.

      """

-     ssl_client_verify = request.environ.get('SSL_CLIENT_VERIFY')

-     if ssl_client_verify != 'SUCCESS':

-         raise Unauthorized('Cannot verify client: %s' % ssl_client_verify)

+     ssl_client_verify = request.environ.get("SSL_CLIENT_VERIFY")

+     if ssl_client_verify != "SUCCESS":

+         raise Unauthorized("Cannot verify client: %s" % ssl_client_verify)

  

-     username = request.environ.get('SSL_CLIENT_S_DN')

+     username = request.environ.get("SSL_CLIENT_S_DN")

      if not username:

-         raise Unauthorized('Unable to get user information (DN) from client certificate')

+         raise Unauthorized(

+             "Unable to get user information (DN) from client certificate"

+         )

  

      user = User.find_user_by_name(username)

      if not user:
@@ -117,7 +126,7 @@ 

      """

      Loads User using Kerberos or SSL auth.

      """

-     if request.environ.get('REMOTE_USER'):

+     if request.environ.get("REMOTE_USER"):

          return load_krb_user_from_request(request)

      else:

          return load_ssl_user_from_request(request)
@@ -125,29 +134,31 @@ 

  

  def query_ldap_groups(uid):

      client = ldap.initialize(conf.auth_ldap_server)

-     groups = client.search_s(conf.auth_ldap_group_base,

-                              ldap.SCOPE_ONELEVEL,

-                              attrlist=['cn', 'gidNumber'],

-                              filterstr='memberUid={0}'.format(uid))

+     groups = client.search_s(

+         conf.auth_ldap_group_base,

+         ldap.SCOPE_ONELEVEL,

+         attrlist=["cn", "gidNumber"],

+         filterstr="memberUid={0}".format(uid),

+     )

  

-     group_names = list(chain(*[info['cn'] for _, info in groups]))

+     group_names = list(chain(*[info["cn"] for _, info in groups]))

      return group_names

  

  

  @commit_on_success

  def load_openidc_user(request):

      """Load FAS user from current request"""

-     username = request.environ.get('REMOTE_USER')

+     username = request.environ.get("REMOTE_USER")

      if not username:

-         raise Unauthorized('REMOTE_USER is not present in request.')

+         raise Unauthorized("REMOTE_USER is not present in request.")

  

-     token = request.environ.get('OIDC_access_token')

+     token = request.environ.get("OIDC_access_token")

      if not token:

-         raise Unauthorized('Missing token passed to ODCS.')

+         raise Unauthorized("Missing token passed to ODCS.")

  

-     scope = request.environ.get('OIDC_CLAIM_scope')

+     scope = request.environ.get("OIDC_CLAIM_scope")

      if not scope:

-         raise Unauthorized('Missing OIDC_CLAIM_scope.')

+         raise Unauthorized("Missing OIDC_CLAIM_scope.")

      validate_scopes(scope)

  

      user_info = get_user_info(token)
@@ -156,9 +167,9 @@ 

      if not user:

          user = User.create_user(username=username)

  

-     g.groups = user_info.get('groups', [])

+     g.groups = user_info.get("groups", [])

      g.user = user

-     g.oidc_scopes = scope.split(' ')

+     g.oidc_scopes = scope.split(" ")

      return user

  

  
@@ -168,40 +179,41 @@ 

      :param str scope: scope string passed in from the request.

      :raises: Unauthorized if any of the required scopes is not present.

      """

-     scopes = scope.split(' ')

+     scopes = scope.split(" ")

      required_scopes = conf.auth_openidc_required_scopes

      for scope in required_scopes:

          if scope not in scopes:

-             raise Unauthorized('Required OIDC scope {0} not present.'.format(scope))

+             raise Unauthorized("Required OIDC scope {0} not present.".format(scope))

  

  

  def require_oidc_scope(scope):

      """Check if required scopes is in OIDC scopes within request"""

-     full_scope = '{0}{1}'.format(conf.oidc_base_namespace, scope)

+     full_scope = "{0}{1}".format(conf.oidc_base_namespace, scope)

      if conf.auth_backend == "openidc" and full_scope not in g.oidc_scopes:

-         message = 'Request does not have required scope %s' % scope

+         message = "Request does not have required scope %s" % scope

          log.error(message)

          raise Forbidden(message)

  

  

  def require_scopes(*scopes):

      """Check if required scopes is in OIDC scopes within request"""

+ 

      def wrapper(f):

          @wraps(f)

          def decorator(*args, **kwargs):

-             if conf.auth_backend != 'noauth':

+             if conf.auth_backend != "noauth":

                  for scope in scopes:

                      require_oidc_scope(scope)

              return f(*args, **kwargs)

+ 

          return decorator

+ 

      return wrapper

  

  

  def get_user_info(token):

      """Query FAS groups from Fedora"""

-     headers = {

-         'authorization': 'Bearer {0}'.format(token)

-     }

+     headers = {"authorization": "Bearer {0}".format(token)}

      r = requests.get(

          conf.auth_openidc_userinfo_uri, headers=headers, timeout=conf.net_timeout

      )
@@ -210,8 +222,10 @@ 

          # endpoint. We treat this as an empty response - and hence an empty group list. An empty

          # group list only makes our authorization checks more strict, so it should be safe

          # to proceed and check the user.

-         log.warning("Failed to query group information - UserInfo endpoint failed with status=%d",

-                     r.status_code)

+         log.warning(

+             "Failed to query group information - UserInfo endpoint failed with status=%d",

+             r.status_code,

+         )

          return {}

  

      return r.json()
@@ -223,30 +237,33 @@ 

      Enable and initialize authentication backend to work with frontend

      authentication module running in Apache.

      """

-     if backend == 'noauth':

+     if backend == "noauth":

          # Do not enable any authentication backend working with the

          # frontend authentication module in Apache.

          log.warning("Authorization is disabled in ODCS configuration.")

          return

-     if backend == 'kerberos':

+     if backend == "kerberos":

          _validate_kerberos_config()

          global load_krb_user_from_request

          load_krb_user_from_request = login_manager.request_loader(

-             load_krb_user_from_request)

-     elif backend == 'openidc':

+             load_krb_user_from_request

+         )

+     elif backend == "openidc":

          global load_openidc_user

          load_openidc_user = login_manager.request_loader(load_openidc_user)

-     elif backend == 'kerberos_or_ssl':

+     elif backend == "kerberos_or_ssl":

          _validate_kerberos_config()

          global load_krb_or_ssl_user_from_request

          load_krb_or_ssl_user_from_request = login_manager.request_loader(

-             load_krb_or_ssl_user_from_request)

-     elif backend == 'ssl':

+             load_krb_or_ssl_user_from_request

+         )

+     elif backend == "ssl":

          global load_ssl_user_from_request

          load_ssl_user_from_request = login_manager.request_loader(

-             load_ssl_user_from_request)

+             load_ssl_user_from_request

+         )

      else:

-         raise ValueError('Unknown backend name {0}.'.format(backend))

+         raise ValueError("Unknown backend name {0}.".format(backend))

  

  

  def has_role(role):
@@ -255,15 +272,15 @@ 

  

      :returns: bool

      """

-     if conf.auth_backend == 'noauth':

+     if conf.auth_backend == "noauth":

          return True

  

      groups = []

-     for group in getattr(conf, role).get('groups', []):

+     for group in getattr(conf, role).get("groups", []):

          groups.append(group)

  

      users = []

-     for user in getattr(conf, role).get('users', []):

+     for user in getattr(conf, role).get("users", []):

          users.append(user)

  

      in_groups = bool(set(flask.g.groups) & set(groups))
@@ -278,9 +295,12 @@ 

  

      :param str role: role name, supported roles: 'allowed_clients', 'admins'.

      """

-     valid_roles = ['allowed_clients', 'admins']

+     valid_roles = ["allowed_clients", "admins"]

      if role not in valid_roles:

-         raise ValueError("Unknown role <%s> specified, supported roles: %s." % (role, str(valid_roles)))

+         raise ValueError(

+             "Unknown role <%s> specified, supported roles: %s."

+             % (role, str(valid_roles))

+         )

  

      def wrapper(f):

          @wraps(f)
@@ -291,15 +311,19 @@ 

              msg = "User %s is not in role %s." % (flask.g.user.username, role)

              log.error(msg)

              raise Forbidden(msg)

+ 

          return wrapped

+ 

      return wrapper

  

  

  def login_required(f):

      """Wrapper of flask_login's login_required to ingore auth check when auth backend is 'noauth'."""

+ 

      @wraps(f)

      def wrapped(*args, **kwargs):

-         if conf.auth_backend == 'noauth':

+         if conf.auth_backend == "noauth":

              return f(*args, **kwargs)

          return _login_required(f)(*args, **kwargs)

+ 

      return wrapped
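
The hunks above also insert blank lines around the nested wrapper/decorator/wrapped functions; that is black's standard treatment of inner function definitions, not a logic change. A self-contained sketch of the resulting shape (the decorator name noop_required is made up for illustration):

    from functools import wraps

    def noop_required(f):
        """Toy decorator showing where black places blank lines around nested defs."""

        @wraps(f)
        def wrapped(*args, **kwargs):
            return f(*args, **kwargs)

        return wrapped

    @noop_required
    def greet():
        return "hello"

    print(greet())
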

file modified
+243 -147
@@ -32,7 +32,13 @@ 

  from datetime import datetime, timedelta

  from odcs.server import log, conf, app, db

  from odcs.server.models import Compose, COMPOSE_STATES, COMPOSE_FLAGS

- from odcs.server.pungi import Pungi, PungiConfig, PungiSourceType, PungiLogs, RawPungiConfig

+ from odcs.server.pungi import (

+     Pungi,

+     PungiConfig,

+     PungiSourceType,

+     PungiLogs,

+     RawPungiConfig,

+ )

  from odcs.server.pulp import Pulp

  from odcs.server.cache import KojiTagCache

  from odcs.server.pungi_compose import PungiCompose
@@ -59,6 +65,7 @@ 

      The `BackendThread.do_work(...)` is called repeatedly after `timeout`

      seconds.

      """

+ 

      def __init__(self, timeout=1):

          """

          Creates new BackendThread instance.
@@ -129,6 +136,7 @@ 

      """

      Thread used to remove old expired composes.

      """

+ 

      def __init__(self):

          """

          Creates new RemoveExpiredComposesThread instance.
@@ -164,8 +172,7 @@ 

  

          # Be nice and don't fail when the directory does not exist.

          if not os.path.exists(toplevel_dir):

-             log.warning("Cannot remove directory %s, it does not exist",

-                         toplevel_dir)

+             log.warning("Cannot remove directory %s, it does not exist", toplevel_dir)

              return

  

          # Temporary dictionary to store errors from `self._on_rmtree_error`.
@@ -183,8 +190,7 @@ 

              shutil.rmtree(toplevel_dir, onerror=self._on_rmtree_error)

  

          for path, error in self._rmtree_errors.items():

-             log.warning("Cannot remove some files in %s: %r" % (

-                 path, error))

+             log.warning("Cannot remove some files in %s: %r" % (path, error))

  

      def _get_compose_id_from_path(self, path):

          """
@@ -218,8 +224,7 @@ 

              else:

                  state_reason = "Compose is expired."

              if compose.state_reason:

-                 state_reason = '{}\n{}'.format(compose.state_reason,

-                                                state_reason)

+                 state_reason = "{}\n{}".format(compose.state_reason, state_reason)

              compose.transition(COMPOSE_STATES["removed"], state_reason)

              if not compose.reused_id:

                  self._remove_compose_dir(compose.toplevel_dir)
@@ -254,15 +259,22 @@ 

  

              composes = Compose.query.filter(Compose.id == compose_id).all()

              if not composes:

-                 log.info("Removing data of compose %d - it is not in "

-                          "database: %s", compose_id, path)

+                 log.info(

+                     "Removing data of compose %d - it is not in " "database: %s",

+                     compose_id,

+                     path,

+                 )

                  self._remove_compose_dir(path)

                  continue

  

              compose = composes[0]

              if compose.state == COMPOSE_STATES["removed"]:

-                 log.info("%r: Removing data of compose - it has already "

-                          "expired some time ago: %s", compose_id, path)

+                 log.info(

+                     "%r: Removing data of compose - it has already "

+                     "expired some time ago: %s",

+                     compose_id,

+                     path,

+                 )

                  self._remove_compose_dir(path)

                  continue

  
@@ -277,16 +289,24 @@ 

  

      koji_module = koji.get_profile_module(conf.koji_profile)

      session_opts = {}

-     for key in ('krbservice', 'timeout', 'keepalive',

-                 'max_retries', 'retry_interval', 'anon_retry',

-                 'offline_retry', 'offline_retry_interval',

-                 'debug', 'debug_xmlrpc', 'krb_rdns',

-                 'use_fast_upload'):

+     for key in (

+         "krbservice",

+         "timeout",

+         "keepalive",

+         "max_retries",

+         "retry_interval",

+         "anon_retry",

+         "offline_retry",

+         "offline_retry_interval",

+         "debug",

+         "debug_xmlrpc",

+         "krb_rdns",

+         "use_fast_upload",

+     ):

          value = getattr(koji_module.config, key, None)

          if value is not None:

              session_opts[key] = value

-     koji_session = koji.ClientSession(koji_module.config.server,

-                                       session_opts)

+     koji_session = koji.ClientSession(koji_module.config.server, session_opts)

      return koji_session

  

  
@@ -301,13 +321,14 @@ 

      ids = [info["id"]]

      seen_tags = tags or set()

      inheritance_data = koji_session.getInheritanceData(tag)

-     inheritance_data = [data for data in inheritance_data

-                         if data['parent_id'] not in seen_tags]

+     inheritance_data = [

+         data for data in inheritance_data if data["parent_id"] not in seen_tags

+     ]

  

      # Iterate over all the tags this tag inherits from.

      for inherited in inheritance_data:

          # Make a note to ourselves that we have seen this parent_tag.

-         parent_tag_id = inherited['parent_id']

+         parent_tag_id = inherited["parent_id"]

          seen_tags.add(parent_tag_id)

  

          # Get tag info for the parent_tag.
@@ -316,7 +337,7 @@ 

              log.error("Cannot get info about Koji tag %s", parent_tag_id)

              return []

  

-         ids += koji_get_inherited_tags(koji_session, info['name'], seen_tags)

+         ids += koji_get_inherited_tags(koji_session, info["name"], seen_tags)

  

      return ids

  
@@ -359,15 +380,19 @@ 

          # get the same results.

          if not compose.koji_event:

              if compose.source not in LAST_EVENTS_CACHE:

-                 event_id = int(koji_session.getLastEvent()['id'])

-             elif tag_changed(koji_session,

-                              compose.source,

-                              LAST_EVENTS_CACHE[compose.source]):

-                 event_id = int(koji_session.getLastEvent()['id'])

+                 event_id = int(koji_session.getLastEvent()["id"])

+             elif tag_changed(

+                 koji_session, compose.source, LAST_EVENTS_CACHE[compose.source]

+             ):

+                 event_id = int(koji_session.getLastEvent()["id"])

              else:

                  event_id = LAST_EVENTS_CACHE[compose.source]

-                 log.info('Reuse koji event %s to generate compose %s from source %s',

-                          event_id, compose.id, compose.source)

+                 log.info(

+                     "Reuse koji event %s to generate compose %s from source %s",

+                     event_id,

+                     compose.id,

+                     compose.source,

+                 )

              compose.koji_event = event_id

              # event_id could be a new koji event ID. Cache it for next potential

              # reuse for same tag.
@@ -387,7 +412,8 @@ 

              # NSVC.

              is_complete_nsvc = module.count(":") == 3

              specified_mbs_modules += mbs.get_latest_modules(

-                 module, include_done or is_complete_nsvc)

+                 module, include_done or is_complete_nsvc

+             )

  

          expand = not compose.flags & COMPOSE_FLAGS["no_deps"]

          new_mbs_modules = mbs.validate_module_list(specified_mbs_modules, expand=expand)
@@ -395,8 +421,9 @@ 

          uids = sorted(

              "{name}:{stream}:{version}:{context}".format(**m)

              for m in new_mbs_modules

-             if m['name'] not in conf.base_module_names)

-         compose.source = ' '.join(uids)

+             if m["name"] not in conf.base_module_names

+         )

+         compose.source = " ".join(uids)

      elif compose.source_type == PungiSourceType.PUNGI_COMPOSE:

          external_compose = PungiCompose(compose.source)

          rpms_data = external_compose.get_rpms_data()
@@ -424,7 +451,7 @@ 

          packages = set()

          for rpms in rpms_data["builds"].values():

              for rpm_nevra in rpms:

-                 packages.add(productmd.common.parse_nvra(rpm_nevra)['name'])

+                 packages.add(productmd.common.parse_nvra(rpm_nevra)["name"])

          compose.packages = " ".join(packages)

  

  
@@ -441,9 +468,14 @@ 

          return None

  

      # Get all the active composes of the same source_type

-     composes = db.session.query(Compose).filter(

-         Compose.state == COMPOSE_STATES["done"],

-         Compose.source_type == compose.source_type).all()

+     composes = (

+         db.session.query(Compose)

+         .filter(

+             Compose.state == COMPOSE_STATES["done"],

+             Compose.source_type == compose.source_type,

+         )

+         .all()

+     )

  

      for old_compose in composes:

          # Skip the old_compose in case it reuses another compose. In that case
@@ -453,107 +485,130 @@ 

          if old_compose.reused_id:

              continue

  

-         packages = set(compose.packages.split(" ")) \

-             if compose.packages else set()

-         old_packages = set(old_compose.packages.split(" ")) \

-             if old_compose.packages else set()

+         packages = set(compose.packages.split(" ")) if compose.packages else set()

+         old_packages = (

+             set(old_compose.packages.split(" ")) if old_compose.packages else set()

+         )

          if packages != old_packages:

-             log.debug("%r: Cannot reuse %r - packages not same", compose,

-                       old_compose)

+             log.debug("%r: Cannot reuse %r - packages not same", compose, old_compose)

              continue

  

-         builds = set(compose.builds.split(" ")) \

-             if compose.builds else set()

-         old_builds = set(old_compose.builds.split(" ")) \

-             if old_compose.builds else set()

+         builds = set(compose.builds.split(" ")) if compose.builds else set()

+         old_builds = set(old_compose.builds.split(" ")) if old_compose.builds else set()

          if builds != old_builds:

-             log.debug("%r: Cannot reuse %r - builds not same", compose,

-                       old_compose)

+             log.debug("%r: Cannot reuse %r - builds not same", compose, old_compose)

              continue

  

          source = set(compose.source.split(" "))

          old_source = set(old_compose.source.split(" "))

          if source != old_source:

-             log.debug("%r: Cannot reuse %r - sources not same", compose,

-                       old_compose)

+             log.debug("%r: Cannot reuse %r - sources not same", compose, old_compose)

              continue

  

          if compose.flags != old_compose.flags:

-             log.debug("%r: Cannot reuse %r - flags not same, %d != %d",

-                       compose, old_compose, compose.flags,

-                       old_compose.flags)

+             log.debug(

+                 "%r: Cannot reuse %r - flags not same, %d != %d",

+                 compose,

+                 old_compose,

+                 compose.flags,

+                 old_compose.flags,

+             )

              continue

  

          if compose.results != old_compose.results:

-             log.debug("%r: Cannot reuse %r - results not same, %d != %d",

-                       compose, old_compose, compose.results,

-                       old_compose.results)

+             log.debug(

+                 "%r: Cannot reuse %r - results not same, %d != %d",

+                 compose,

+                 old_compose,

+                 compose.results,

+                 old_compose.results,

+             )

              continue

  

-         sigkeys = set(compose.sigkeys.split(" ")) \

-             if compose.sigkeys else set()

-         old_sigkeys = set(old_compose.sigkeys.split(" ")) \

-             if old_compose.sigkeys else set()

+         sigkeys = set(compose.sigkeys.split(" ")) if compose.sigkeys else set()

+         old_sigkeys = (

+             set(old_compose.sigkeys.split(" ")) if old_compose.sigkeys else set()

+         )

          if sigkeys != old_sigkeys:

-             log.debug("%r: Cannot reuse %r - sigkeys not same", compose,

-                       old_compose)

+             log.debug("%r: Cannot reuse %r - sigkeys not same", compose, old_compose)

              continue

  

-         arches = set(compose.arches.split(" ")) \

-             if compose.arches else set()

-         old_arches = set(old_compose.arches.split(" ")) \

-             if old_compose.arches else set()

+         arches = set(compose.arches.split(" ")) if compose.arches else set()

+         old_arches = set(old_compose.arches.split(" ")) if old_compose.arches else set()

          if arches != old_arches:

-             log.debug("%r: Cannot reuse %r - arches not same", compose,

-                       old_compose)

+             log.debug("%r: Cannot reuse %r - arches not same", compose, old_compose)

              continue

  

-         lookaside_repos = set(compose.lookaside_repos.split(" ")) \

-             if compose.lookaside_repos else set()

-         old_lookaside_repos = set(old_compose.lookaside_repos.split(" ")) \

-             if old_compose.lookaside_repos else set()

+         lookaside_repos = (

+             set(compose.lookaside_repos.split(" "))

+             if compose.lookaside_repos

+             else set()

+         )

+         old_lookaside_repos = (

+             set(old_compose.lookaside_repos.split(" "))

+             if old_compose.lookaside_repos

+             else set()

+         )

          if lookaside_repos != old_lookaside_repos:

-             log.debug("%r: Cannot reuse %r - lookaside_repos not same", compose,

-                       old_compose)

+             log.debug(

+                 "%r: Cannot reuse %r - lookaside_repos not same", compose, old_compose

+             )

              continue

  

-         multilib_arches = set(compose.multilib_arches.split(" ")) \

-             if compose.multilib_arches else set()

-         old_multilib_arches = set(old_compose.multilib_arches.split(" ")) \

-             if old_compose.multilib_arches else set()

+         multilib_arches = (

+             set(compose.multilib_arches.split(" "))

+             if compose.multilib_arches

+             else set()

+         )

+         old_multilib_arches = (

+             set(old_compose.multilib_arches.split(" "))

+             if old_compose.multilib_arches

+             else set()

+         )

          if multilib_arches != old_multilib_arches:

-             log.debug("%r: Cannot reuse %r - multilib_arches not same", compose,

-                       old_compose)

+             log.debug(

+                 "%r: Cannot reuse %r - multilib_arches not same", compose, old_compose

+             )

              continue

  

          multilib_method = compose.multilib_method

          old_multilib_method = old_compose.multilib_method

          if multilib_method != old_multilib_method:

-             log.debug("%r: Cannot reuse %r - multilib_method not same", compose,

-                       old_compose)

+             log.debug(

+                 "%r: Cannot reuse %r - multilib_method not same", compose, old_compose

+             )

              continue

  

-         modular_koji_tags = set(compose.modular_koji_tags.split(" ")) \

-             if compose.modular_koji_tags else set()

-         old_modular_koji_tags = set(old_compose.modular_koji_tags.split(" ")) \

-             if old_compose.modular_koji_tags else set()

+         modular_koji_tags = (

+             set(compose.modular_koji_tags.split(" "))

+             if compose.modular_koji_tags

+             else set()

+         )

+         old_modular_koji_tags = (

+             set(old_compose.modular_koji_tags.split(" "))

+             if old_compose.modular_koji_tags

+             else set()

+         )

          if modular_koji_tags != old_modular_koji_tags:

-             log.debug("%r: Cannot reuse %r - modular_koji_tags not same", compose,

-                       old_compose)

+             log.debug(

+                 "%r: Cannot reuse %r - modular_koji_tags not same", compose, old_compose

+             )

              continue

  

          module_defaults_url = compose.module_defaults_url

          old_module_defaults_url = old_compose.module_defaults_url

          if module_defaults_url != old_module_defaults_url:

-             log.debug("%r: Cannot reuse %r - module_defaults_url not same", compose,

-                       old_compose)

+             log.debug(

+                 "%r: Cannot reuse %r - module_defaults_url not same",

+                 compose,

+                 old_compose,

+             )

              continue

  

          target_dir = compose.target_dir

          old_target_dir = old_compose.target_dir

          if target_dir != old_target_dir:

-             log.debug("%r: Cannot reuse %r - target_dir not same", compose,

-                       old_compose)

+             log.debug("%r: Cannot reuse %r - target_dir not same", compose, old_compose)

              continue

  

          # In case of compose renewal, the compose.koji_event will be actually
@@ -561,11 +616,17 @@ 

          # example submitted 1 year ago, so koji_event will be one year old.

          # But the `old_compose` was submitted a few days ago at most.

          # In this case, we must never reuse the newer compose for the old one.

-         if (compose.koji_event and old_compose.koji_event and

-                 compose.koji_event < old_compose.koji_event):

-             log.debug("%r: Cannot reuse %r - koji_event of current compose "

-                       "is lower than koji_event of old compose.", compose,

-                       old_compose)

+         if (

+             compose.koji_event

+             and old_compose.koji_event

+             and compose.koji_event < old_compose.koji_event

+         ):

+             log.debug(

+                 "%r: Cannot reuse %r - koji_event of current compose "

+                 "is lower than koji_event of old compose.",

+                 compose,

+                 old_compose,

+             )

              continue

  

          if compose.source_type == PungiSourceType.KOJI_TAG:
@@ -573,13 +634,21 @@ 

              # Koji tag have not changed since previous old_compose.

              koji_session = create_koji_session()

              if tag_changed(koji_session, compose.source, old_compose.koji_event):

-                 log.debug("%r: Cannot reuse %r - one of the tags changed "

-                           "since previous compose.", compose, old_compose)

+                 log.debug(

+                     "%r: Cannot reuse %r - one of the tags changed "

+                     "since previous compose.",

+                     compose,

+                     old_compose,

+                 )

                  continue

          elif compose.koji_event != old_compose.koji_event:

-             log.debug("%r: Cannot reuse %r - koji_events not same, %d != %d",

-                       compose, old_compose, compose.koji_event,

-                       old_compose.koji_event)

+             log.debug(

+                 "%r: Cannot reuse %r - koji_events not same, %d != %d",

+                 compose,

+                 old_compose,

+                 compose.koji_event,

+                 old_compose.koji_event,

+             )

              continue

  

          return old_compose
@@ -596,8 +665,9 @@ 

      # Set the reuse_id

      compose.reused_id = compose_to_reuse.id

      # Set the time_to_expire to the bigger of the two composes' values.

-     compose.time_to_expire = max(compose.time_to_expire,

-                                  compose_to_reuse.time_to_expire)

+     compose.time_to_expire = max(

+         compose.time_to_expire, compose_to_reuse.time_to_expire

+     )

      # NOTE: reuse_compose is only called by generate_pungi_compose at this

      # moment. This change will be committed when the compose state is transitioned,

      # which will call session's commit. If this method is called from somewhere
@@ -612,8 +682,7 @@ 

      will be generated.

      """

      if not data:

-         baseurl = os.path.join(

-             compose.result_repo_url, "$basearch", "os")

+         baseurl = os.path.join(compose.result_repo_url, "$basearch", "os")

          data = """[%s]

  name=ODCS repository for compose %s

  baseurl=%s
@@ -623,7 +692,11 @@ 

  repo_gpgcheck=0

  enabled=1

  enabled_metadata=1

- """ % (compose.name, compose.name, baseurl)

+ """ % (

+             compose.name,

+             compose.name,

+             baseurl,

+         )

  

      # Ensure the directory exists

      dirname = os.path.dirname(compose.result_repofile_path)
@@ -640,20 +713,24 @@ 

      """

      content_sets = compose.source.split(" ")

  

-     pulp = Pulp(server_url=conf.pulp_server_url,

-                 username=conf.pulp_username,

-                 password=conf.pulp_password,

-                 compose=compose)

+     pulp = Pulp(

+         server_url=conf.pulp_server_url,

+         username=conf.pulp_username,

+         password=conf.pulp_password,

+         compose=compose,

+     )

  

      repofile = ""

      repos = pulp.get_repos_from_content_sets(

-         content_sets,

-         compose.flags & COMPOSE_FLAGS["include_unpublished_pulp_repos"])

+         content_sets, compose.flags & COMPOSE_FLAGS["include_unpublished_pulp_repos"]

+     )

      ignore_absent_pulp_repos = compose.flags & COMPOSE_FLAGS["ignore_absent_pulp_repos"]

      if len(repos) != len(content_sets):

          found_content_sets = repos.keys()

-         err = "Failed to find all the content_sets %r in the Pulp, " \

-             "found only %r" % (content_sets, found_content_sets)

+         err = "Failed to find all the content_sets %r in the Pulp, " "found only %r" % (

+             content_sets,

+             found_content_sets,

+         )

          if ignore_absent_pulp_repos:

              log.info(err)

              # Update the source in the compose. This ensures the source matches
@@ -675,7 +752,11 @@ 

  baseurl=%s

  enabled=1

  gpgcheck=0

- """ % (name, name, url)

+ """ % (

+             name,

+             name,

+             url,

+         )

          repofile += r

          arches = arches.union(repo_data["arches"])

          sigkeys = sigkeys.union(repo_data["sigkeys"])
@@ -684,8 +765,7 @@ 

  

      compose.arches = " ".join(arches)

      compose.sigkeys = " ".join(sigkeys)

-     compose.transition(COMPOSE_STATES["done"],

-                        "Compose is generated successfully")

+     compose.transition(COMPOSE_STATES["done"], "Compose is generated successfully")

      log.info("%r: Compose done", compose)

  

  
@@ -737,7 +817,9 @@ 

      # symlink. In this case, we will remove the latest-symlink later too.

      latest_name = "latest-%s" % "-".join(compose.pungi_compose_id.split("-")[:2])

      latest_symlink = os.path.join(symlink_dir, latest_name)

-     remove_latest_symlink = os.path.realpath(symlink) == os.path.realpath(latest_symlink)

+     remove_latest_symlink = os.path.realpath(symlink) == os.path.realpath(

+         latest_symlink

+     )

  

      # Remove non-latest symlink.

      log.info("%r: Removing %s symlink.", compose, symlink)
@@ -789,17 +871,23 @@ 

                  multilib_arches = compose.multilib_arches.split(" ")

              else:

                  multilib_arches = None

-             pungi_cfg = PungiConfig(compose.name, "1", compose.source_type,

-                                     compose.source, packages=packages,

-                                     sigkeys=compose.sigkeys,

-                                     results=compose.results,

-                                     arches=compose.arches.split(" "),

-                                     multilib_arches=multilib_arches,

-                                     multilib_method=compose.multilib_method,

-                                     builds=builds, flags=compose.flags,

-                                     lookaside_repos=compose.lookaside_repos,

-                                     modular_koji_tags=compose.modular_koji_tags,

-                                     module_defaults_url=compose.module_defaults_url)

+             pungi_cfg = PungiConfig(

+                 compose.name,

+                 "1",

+                 compose.source_type,

+                 compose.source,

+                 packages=packages,

+                 sigkeys=compose.sigkeys,

+                 results=compose.results,

+                 arches=compose.arches.split(" "),

+                 multilib_arches=multilib_arches,

+                 multilib_method=compose.multilib_method,

+                 builds=builds,

+                 flags=compose.flags,

+                 lookaside_repos=compose.lookaside_repos,

+                 modular_koji_tags=compose.modular_koji_tags,

+                 module_defaults_url=compose.module_defaults_url,

+             )

              if compose.flags & COMPOSE_FLAGS["no_deps"]:

                  pungi_cfg.gather_method = "nodeps"

              if compose.flags & COMPOSE_FLAGS["no_inheritance"]:
@@ -843,8 +931,7 @@ 

      # If there is no exception generated by the pungi.run() and if

      # validation didn't fail, then we know the compose has been

      # successfully generated.

-     compose.transition(COMPOSE_STATES["done"],

-                        "Compose is generated successfully")

+     compose.transition(COMPOSE_STATES["done"], "Compose is generated successfully")

      log.info("%r: Compose done", compose)

  

      koji_tag_cache.update_cache(compose)
@@ -864,17 +951,19 @@ 

              for arch in rm[variant]:

                  for srpm_nevra, data in six.iteritems(rm[variant][arch]):

                      for rpm_nevra, data in six.iteritems(rm[variant][arch][srpm_nevra]):

-                         if data['category'] == 'source':

+                         if data["category"] == "source":

                              continue

                          rpm_nevras.append(rpm_nevra)

-         rpms = set([productmd.common.parse_nvra(n)['name'] for n in rpm_nevras])

+         rpms = set([productmd.common.parse_nvra(n)["name"] for n in rpm_nevras])

          not_found = []

          for pkg in packages:

              if pkg not in rpms:

                  not_found.append(pkg)

          if not_found:

-             msg = "The following requested packages are not present in the generated compose: %s." % \

-                   " ".join(not_found)

+             msg = (

+                 "The following requested packages are not present in the generated compose: %s."

+                 % " ".join(not_found)

+             )

              log.error(msg)

              raise RuntimeError(msg)

  
@@ -917,7 +1006,9 @@ 

              # Be nice to end user and replace paths to logs or other files with URL

              # accessible to the user.

              if compose.on_default_target_dir:

-                 state_reason = state_reason.replace(conf.target_dir, conf.target_dir_url)

+                 state_reason = state_reason.replace(

+                     conf.target_dir, conf.target_dir_url

+                 )

              compose.transition(COMPOSE_STATES["failed"], state_reason)

  

          compose = Compose.query.filter(Compose.id == compose_id).one()
@@ -935,6 +1026,7 @@ 

      Thread used to query the database for composes in "wait" state and

      generating the composes using Pungi.

      """

+ 

      def __init__(self):

          """

          Creates new ComposerThread instance.
@@ -956,8 +1048,7 @@ 

          Adds the compose to the queue of composes to generate, so

          the ThreadPoolExecutor can start working on it.

          """

-         compose.transition(COMPOSE_STATES["generating"],

-                            "Compose thread started")

+         compose.transition(COMPOSE_STATES["generating"], "Compose thread started")

  

          self.currently_generating.append(compose.id)

          if compose.source_type == PungiSourceType.PULP:
@@ -970,8 +1061,7 @@ 

          Gets all the composes in "wait" state. Generates them using Pungi

          by calling `generate_compose(...)` in ThreadPoolExecutor.

          """

-         composes = Compose.query.filter(

-             Compose.state == COMPOSE_STATES["wait"]).all()

+         composes = Compose.query.filter(Compose.state == COMPOSE_STATES["wait"]).all()

  

          for compose in composes:

              log.info("%r: Going to start compose generation.", compose)
@@ -989,16 +1079,21 @@ 

          too_old_datetime = now - timedelta(seconds=max_generating_time)

  

          # Get composes which are in 'generating' state for too long.

-         composes = Compose.query.filter(

-             Compose.state == COMPOSE_STATES["generating"],

-             Compose.time_started < too_old_datetime).order_by(

-                 Compose.id).all()

+         composes = (

+             Compose.query.filter(

+                 Compose.state == COMPOSE_STATES["generating"],

+                 Compose.time_started < too_old_datetime,

+             )

+             .order_by(Compose.id)

+             .all()

+         )

  

          for compose in composes:

              compose.transition(

                  COMPOSE_STATES["failed"],

                  "Compose stuck in 'generating' state for longer than %d "

-                 "seconds." % max_generating_time)

+                 "seconds." % max_generating_time,

+             )

  

      def generate_lost_composes(self):

          """
@@ -1009,7 +1104,8 @@ 

          in the middle of compose generation.

          """

          composes = Compose.query.filter(

-             Compose.state == COMPOSE_STATES["generating"]).all()

+             Compose.state == COMPOSE_STATES["generating"]

+         ).all()

  

          for compose in composes:

              if compose.id in self.currently_generating:
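
The SQLAlchemy query rewrites in this file follow black's "magic trailing comma" rule: once a call no longer fits on one line, each argument moves onto its own line with a trailing comma, and chained method calls each get their own line inside wrapping parentheses. For reference, the shape copied from the hunk above (not runnable on its own, since Compose and COMPOSE_STATES belong to the project):

    composes = (
        Compose.query.filter(
            Compose.state == COMPOSE_STATES["generating"],
            Compose.time_started < too_old_datetime,
        )
        .order_by(Compose.id)
        .all()
    )
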

file modified
+7 -5
@@ -73,7 +73,8 @@ 

                      # 3rd party process.

                      log.exception(

                          "Old koji tag cache directory %s removed while checking "

-                         "Koji cache." % path)

+                         "Koji cache." % path

+                     )

                      continue

  

                  if mtime > threshold:
@@ -131,8 +132,7 @@ 

          """

  

          cached_compose_dir = self.cached_compose_dir(compose)

-         log.info("Reusing repodata from old cached compose %s",

-                  cached_compose_dir)

+         log.info("Reusing repodata from old cached compose %s", cached_compose_dir)

  

          # Create the lock. The rmtree and copytree on same fs should not take more

          # than 3 minutes really.
@@ -170,8 +170,10 @@ 

  

          :param models.Compose compose: Compose to update the cache from.

          """

-         if (compose.source_type != PungiSourceType.KOJI_TAG or

-                 compose.state != COMPOSE_STATES["done"]):

+         if (

+             compose.source_type != PungiSourceType.KOJI_TAG

+             or compose.state != COMPOSE_STATES["done"]

+         ):

              log.info("Not caching the compose %s.", compose)

              return

  

@@ -32,7 +32,8 @@ 

  from odcs.server.backend import (

      generate_compose as backend_generate_compose,

      ComposerThread,

-     RemoveExpiredComposesThread)

+     RemoveExpiredComposesThread,

+ )

  from odcs.server.utils import retry

  from odcs.server.models import Compose, COMPOSE_STATES

  from odcs.server.pungi import PungiSourceType
@@ -96,9 +97,7 @@ 

  

  celery_app = Celery("backend", broker=broker_url)

  celery_app.conf.update(conf.celery_config)

- celery_app.conf.update({

-     'task_routes': ('odcs.server.celery_tasks.TaskRouter')

- })

+ celery_app.conf.update({"task_routes": ("odcs.server.celery_tasks.TaskRouter")})

  

  

  class TaskRouter:
@@ -135,8 +134,12 @@ 

                  for key, value in rule.items():

                      if not compose_md.get(key):

                          raise ValueError(

-                             ("Task Router: Routing rule for queue %s for task %s contains an "

-                              "invalid property: %s") % (queue, task_name, key))

+                             (

+                                 "Task Router: Routing rule for queue %s for task %s contains an "

+                                 "invalid property: %s"

+                             )

+                             % (queue, task_name, key)

+                         )

  

                      # if the value of the property from the rule and compose does not match, the

                      # whole rule is ignored and we go to the next rule
@@ -186,7 +189,10 @@ 

      """

      compose = get_odcs_compose(compose_id)

      if compose.state != COMPOSE_STATES["wait"]:

-         raise RuntimeError("The 'generate_compose' called for compose not in 'wait' state: %r" % compose)

+         raise RuntimeError(

+             "The 'generate_compose' called for compose not in 'wait' state: %r"

+             % compose

+         )

      compose.transition(COMPOSE_STATES["generating"], "Compose thread started")

      db.session.commit()

      backend_generate_compose(compose.id)
@@ -220,7 +226,7 @@ 

      if act_obj is not None:

          for i in act_obj.values():

              active += i

-     active = [i['id'] for i in active]

+     active = [i["id"] for i in active]

  

      # Reserved tasks are assigned to a particular worker, but

      # are not running yet.
@@ -229,7 +235,7 @@ 

      if res_obj is not None:

          for i in res_obj.values():

              reserved += i

-     reserved = [i['id'] for i in reserved]

+     reserved = [i["id"] for i in reserved]

  

      return set(reserved + active)

  
@@ -269,10 +275,13 @@ 

      to_time = now - timedelta(minutes=3)

  

      # Get composes which are in 'wait' state for too long.

-     composes = Compose.query.filter(

-         Compose.state == COMPOSE_STATES["wait"],

-         Compose.time_submitted < to_time).order_by(

-             Compose.id).all()

+     composes = (

+         Compose.query.filter(

+             Compose.state == COMPOSE_STATES["wait"], Compose.time_submitted < to_time

+         )

+         .order_by(Compose.id)

+         .all()

+     )

  

      # Get the current task ids registered by the workers.

      task_ids = get_current_celery_task_ids()
@@ -285,7 +294,8 @@ 

          if compose.time_submitted < from_time:

              compose.transition(

                  COMPOSE_STATES["failed"],

-                 "Compose stuck in 'wait' state for longer than 3 days.")

+                 "Compose stuck in 'wait' state for longer than 3 days.",

+             )

              continue

  

          log.info("%r: Rescheduling compose stuck in 'wait' state.", compose)
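
One readability wart survives the reformat: black never merges adjacent string literals, so a message that was split to fit the old indentation can stay oddly split after reflow, as in the "...it is not in " "database: %s" log call earlier in this diff. A runnable illustration (the compose id and path below are made-up values):

    # Python joins the two adjacent literals at compile time,
    # so the message prints as one string.
    msg = "Removing data of compose %d - it is not in " "database: %s"
    print(msg % (42, "/srv/odcs/composes"))
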

file modified
+3 -1
@@ -84,7 +84,9 @@ 

  

  

  class Package(object):

-     def __init__(self, name, arch=None, type=None, requires=None, is_basearchonly=False):

+     def __init__(

+         self, name, arch=None, type=None, requires=None, is_basearchonly=False

+     ):

          self.name = name

          self.arch = arch

          self.type = type
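
The Package.__init__ hunk above shows black wrapping a signature that exceeds its 88-character limit: the parameters move onto an indented continuation line and the closing parenthesis gets its own line. The same shape, runnable on its own (trimmed to the fields visible above; the example values are made up):

    class Package(object):
        def __init__(
            self, name, arch=None, type=None, requires=None, is_basearchonly=False
        ):
            self.name = name
            self.arch = arch
            self.type = type

    pkg = Package("bash", arch="x86_64")
    print(pkg.name, pkg.arch)
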

file modified
+358 -324
@@ -35,50 +35,59 @@ 

      Configure ODCS

      """

      config_module = None

-     config_file = '/etc/odcs/config.py'

-     config_section = 'DevConfiguration'

+     config_file = "/etc/odcs/config.py"

+     config_section = "DevConfiguration"

  

      # automagically detect production environment:

      #   - existing and readable config_file presets ProdConfiguration

      try:

          with open(config_file):

-             config_section = 'ProdConfiguration'

+             config_section = "ProdConfiguration"

      except (OSError, IOError) as e:

          # Use stderr here, because logging is not initialized yet...

-         sys.stderr.write("WARN: Cannot open %s: %s\n" % (

-             config_file, e.strerror))

+         sys.stderr.write("WARN: Cannot open %s: %s\n" % (config_file, e.strerror))

          sys.stderr.write("WARN: DevConfiguration will be used.\n")

  

      # try getting config_file from os.environ

-     if 'ODCS_CONFIG_FILE' in os.environ:

-         config_file = os.environ['ODCS_CONFIG_FILE']

+     if "ODCS_CONFIG_FILE" in os.environ:

+         config_file = os.environ["ODCS_CONFIG_FILE"]

      # try getting config_section from os.environ

-     if 'ODCS_CONFIG_SECTION' in os.environ:

-         config_section = os.environ['ODCS_CONFIG_SECTION']

+     if "ODCS_CONFIG_SECTION" in os.environ:

+         config_section = os.environ["ODCS_CONFIG_SECTION"]

      # TestConfiguration shall only be used for running tests, otherwise...

-     if any(['nosetests' in arg or 'noserunner.py' in arg or 'py.test' in arg or 'pytest.py' in arg for arg in sys.argv]):

-         config_section = 'TestConfiguration'

+     if any(

+         [

+             "nosetests" in arg

+             or "noserunner.py" in arg

+             or "py.test" in arg

+             or "pytest.py" in arg

+             for arg in sys.argv

+         ]

+     ):

+         config_section = "TestConfiguration"

          from conf import config

+ 

          config_module = config

      # ...ODCS_DEVELOPER_ENV always has the last word

      # and overrides anything set previously!

      # In any of the following cases, use configuration directly from ODCS

      # package -> /conf/config.py.

  

-     elif ('ODCS_DEVELOPER_ENV' in os.environ and

-           os.environ['ODCS_DEVELOPER_ENV'].lower() in (

-             '1', 'on', 'true', 'y', 'yes')):

-         config_section = 'DevConfiguration'

+     elif "ODCS_DEVELOPER_ENV" in os.environ and os.environ[

+         "ODCS_DEVELOPER_ENV"

+     ].lower() in ("1", "on", "true", "y", "yes"):

+         config_section = "DevConfiguration"

          from conf import config

+ 

          config_module = config

      # try loading configuration from file

      if not config_module:

          try:

-             config_module = imp.load_source('odcs_runtime_config',

-                                             config_file)

+             config_module = imp.load_source("odcs_runtime_config", config_file)

          except Exception:

-             raise SystemError("Configuration file {} was not found."

-                               .format(config_file))

+             raise SystemError(

+                 "Configuration file {} was not found.".format(config_file)

+             )

  

      # finally configure ODCS

      config_section_obj = getattr(config_module, config_section)
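
The Config class below declares every option in a _defaults table keyed by option name, each entry carrying "type", "default", and "desc" fields. A hedged sketch of how such a table is typically turned into attributes; the loop is illustrative only, not the project's actual code (the two sample entries are based on the hunk below):

    _defaults = {
        "debug": {"type": bool, "default": False, "desc": "Debug mode"},
        "net_timeout": {"type": int, "default": 120, "desc": "Network timeout."},
    }

    class Config(object):
        def __init__(self, defaults=_defaults):
            # Turn each table entry into an instance attribute of the declared type.
            for name, opt in defaults.items():
                setattr(self, name, opt["type"](opt["default"]))

    conf = Config()
    print(conf.debug, conf.net_timeout)
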
@@ -89,309 +98,325 @@ 

  

  class Config(object):

      """Class representing the odcs configuration."""

+ 

      _defaults = {

-         'debug': {

-             'type': bool,

-             'default': False,

-             'desc': 'Debug mode'},

-         'log_backend': {

-             'type': str,

-             'default': None,

-             'desc': 'Log backend'},

-         'log_file': {

-             'type': str,

-             'default': '',

-             'desc': 'Path to log file'},

-         'log_level': {

-             'type': str,

-             'default': 0,

-             'desc': 'Log level'},

-         'net_timeout': {

-             'type': int,

-             'default': 120,

-             'desc': 'Global network timeout for read/write operations, in seconds.'},

-         'net_retry_interval': {

-             'type': int,

-             'default': 30,

-             'desc': 'Global network retry interval for read/write operations, in seconds.'},

-         'arches': {

-             'type': list,

-             'default': ["x86_64"],

-             'desc': 'Compose architectures.'},

-         'pungi_koji': {

-             'type': str,

-             'default': "pungi-koji",

-             'desc': 'Name or full-path to pungi-koji binary.'},

-         'pungi_config_validate': {

-             'type': str,

-             'default': "",

-             'desc': 'Name or full-path to pungi-config-validate binary. '

-                     'If set to empty string, no validation is done.'},

-         'pungi_timeout': {

-             'type': int,

-             'default': 3600,

-             'desc': 'Time in seconds after which the local pungi-koji is '

-                     'killed and compose is marked as failed'},

-         'mergerepo_timeout': {

-             'type': int,

-             'default': 1800,

-             'desc': 'Time in seconds after which the mergerepo_c is '

-                     'killed and compose is marked as failed'},

-         'pungi_conf_path': {

-             'type': str,

-             'default': "/etc/odcs/pungi.conf",

-             'desc': 'Full path to the pungi.conf jinja2 template.'},

-         'target_dir': {

-             'type': str,

-             'default': "/tmp",

-             'desc': 'Path to target dir to which store composes'},

-         'target_dir_url': {

-             'type': str,

-             'default': "http://localhost/odcs",

-             'desc': 'Public facing URL to target_dir.'},

-         'extra_target_dirs': {

-             'type': dict,

-             'default': {},

-             'desc': 'Extra target_dirs to optionally store the compose on '

-                     'instead of the conf.target_dir. Key is the name '

-                     'of volume, value if path to target_dir.'},

-         'seconds_to_live': {

-             'type': int,

-             'default': 24 * 60 * 60,

-             'desc': 'Default number of seconds for which the compose is available.'},

-         'max_seconds_to_live': {

-             'type': int,

-             'default': 72 * 60 * 60,

-             'desc': 'Max number of seconds for which the compose is available.'},

-         'mbs_url': {

-             'type': str,

-             'default': "http://localhost/module-build-service",

-             'desc': 'URL to MSB API.'},

-         'num_concurrent_pungi': {

-             'type': int,

-             'default': 2,

-             'desc': 'Number of concurrent Pungi processes.'},

-         'allowed_source_types': {

-             'type': list,

-             'default': ["tag", "module", "build"],

-             'desc': 'Allowed source types.'},

-         'allowed_flags': {

-             'type': list,

-             'default': COMPOSE_FLAGS.keys(),

-             'desc': 'Allowed compose flags.'},

-         'allowed_arches': {

-             'type': list,

-             'default': ["x86_64"],

-             'desc': 'Allowed compose arches.'},

-         'allowed_results': {

-             'type': list,

-             'default': COMPOSE_RESULTS,

-             'desc': 'Allowed compose results.'},

-         'allowed_sources': {

-             'type': list,

-             'default': [""],

-             'desc': 'Allowed sources.'},

-         'allowed_compose_types': {

-             'type': list,

-             'default': [""],

-             'desc': 'Allowed compose types.'},

-         'auth_ldap_server': {

-             'type': str,

-             'default': '',

-             'desc': "Server URL to query user's groups."},

-         'auth_ldap_group_base': {

-             'type': str,

-             'default': '',

-             'desc': "Group base to query user's groups from LDAP server."},

-         'allowed_clients': {

-             'type': dict,

-             'default': {'groups': {}, 'users': {}},

-             'desc': "Groups and users that are allowed to generate composes."},

-         'admins': {

-             'type': dict,

-             'default': {'groups': [], 'users': []},

-             'desc': "Admin groups and users."},

-         'auth_backend': {

-             'type': str,

-             'default': '',

-             'desc': "Select which authentication backend is enabled and work "

-                     "with frond-end authentication together."},

-         'auth_openidc_userinfo_uri': {

-             'type': str,

-             'default': '',

-             'desc': 'UserInfo endpoint to get user information from FAS.'},

-         'auth_openidc_required_scopes': {

-             'type': list,

-             'default': [],

-             'desc': 'Required scopes for submitting request to run new compose.'},

-         'base_module_names': {

-             'type': set,

-             'default': set(['platform', 'bootstrap']),

-             'desc': ("Set of module names which defines the product version "

-                      "(by their stream) of modules depending on them.")},

-         'messaging_backend': {

-             'type': str,

-             'default': '',

-             'desc': 'Messaging backend, rhmsg or fedora-messaging.'},

-         'messaging_broker_urls': {

-             'type': list,

-             'default': [],

-             'desc': 'List of messaging broker URLs.'},

-         'messaging_cert_file': {

-             'type': str,

-             'default': '',

-             'desc': 'Path to certificate file used to authenticate ODCS by broker.'},

-         'messaging_key_file': {

-             'type': str,

-             'default': '',

-             'desc': 'Path to private key file used to authenticate ODCS by broker.'},

-         'messaging_ca_cert': {

-             'type': str,

-             'default': '',

-             'desc': 'Path to trusted CA certificate bundle.'},

-         'messaging_topic_prefix': {

-             'type': str,

-             'default': '',

-             'desc': 'Prefix for MESSAGING_TOPIC and INTERNAL_MESSAGING_TOPIC.'

-         },

-         'messaging_topic': {

-             'type': str,

-             'default': '',

-             'desc': 'Messaging topic to which messages are sent.'},

-         'internal_messaging_topic': {

-             'type': str,

-             'default': '',

-             'desc': 'Messaging topic to which internal ODCS-only messages are sent.'},

-         'oidc_base_namespace': {

-             'type': str,

-             'default': 'https://pagure.io/odcs/',

-             'desc': 'Base namespace of OIDC scopes.'},

-         'sigkeys': {

-             'type': list,

-             'default': [],

-             'desc': 'Default list of sigkeys. Any package in a compose must '

-                     'be signed by one of those keys. Can be overriden in a '

-                     'compose request.'},

-         'pulp_server_url': {

-             'type': str,

-             'default': '',

-             'desc': 'Server URL of Pulp.'},

-         'pulp_username': {

-             'type': str,

-             'default': '',

-             'desc': 'Username to login Pulp.'},

-         'pulp_password': {

-             'type': str,

-             'default': '',

-             'desc': 'Password to login Pulp.'},

-         'koji_config': {

-             'type': str,

-             'default': None,

-             'desc': 'Koji config file.'},

-         'koji_profile': {

-             'type': str,

-             'default': None,

-             'desc': 'Koji config profile.'},

-         'koji_krb_ccache': {

-             'type': str,

-             'default': None,

-             'desc': 'Kerberos ccache file to use for Koji auth.'},

-         'koji_krb_keytab': {

-             'type': str,

-             'default': None,

-             'desc': 'Kerberos keytab to use for Koji auth.'},

-         'koji_krb_principal': {

-             'type': str,

-             'default': None,

-             'desc': 'Kerberos principal to use for Koji auth.'},

-         'koji_tag_cache_cleanup_timeout': {

-             'type': int,

-             'default': 30,

-             'desc': 'Number of days after which the cached Koji tag data '

-                     'stored in the "koji_tag_cache" directory will be '

-                     'removed.'},

-         'raw_config_urls': {

-             'type': dict,

-             'default': {},

-             'desc': 'URLs to get the raw Pungi config from for "raw_config" '

-                     'source_type. Key is the name of the raw_config "source", '

-                     'value is the URL in which the %s string is replaced with, '

-                     '"commit_hash" value.'},

-         'raw_config_wrapper_conf_path': {

-             'type': str,

-             'default': "/etc/odcs/raw_config_wrapper.conf",

-             'desc': 'Full path to the raw_config_wrapper.conf configuration '

-                     'file. This file holds Pungi configuration which should '

-                     'import real pungi configuration from raw_config.conf '

-                     'in order to override some variables.'},

-         'raw_config_schema_override': {

-             'type': str,

-             'default': "",

-             'desc': 'Full path to the JSON file defining Pungi config schema '

-                     'override. This file is passed to "pungi-config-validate" '

-                     'using the --schema-override option and can influence '

-                     'the raw_config validation.'},

-         'pungi_koji_args': {

-             'type': list,

-             'default': [],

-             'desc': 'Command line arguments used to construct pungi-koji '

-                     'command.'

-         },

-         'raw_config_pungi_koji_args': {

-             'type': dict,

-             'default': {},

-             'desc': 'Command line argument for raw_config source type, which '

-                     'overwrite arguments listed PUNGI_KOJI_ARGS.'

-         },

-         'celery_config': {

-             'type': dict,

-             'default': {},

-             'desc': 'Configuration dict to pass to Celery.'

-         },

-         'celery_broker_url': {

-             'type': str,

-             'default': "",

-             'desc': 'Celery broker URL'

-         },

-         'celery_pulp_composes_queue': {

-             'type': str,

-             'default': "pulp_composes",

-             'desc': 'Name of the Celery queue for Pulp composes.'

-         },

-         'celery_pungi_composes_queue': {

-             'type': str,

-             'default': "pungi_composes",

-             'desc': 'Name of the Celery queue for Pungi composes.'

-         },

-         'celery_cleanup_queue': {

-             'type': str,

-             'default': "cleanup",

-             'desc': 'Name of the Celery queue for cleanup task.'

-         },

-         'celery_router_config': {

-             'type': dict,

-             'default': {

+         "debug": {"type": bool, "default": False, "desc": "Debug mode"},

+         "log_backend": {"type": str, "default": None, "desc": "Log backend"},

+         "log_file": {"type": str, "default": "", "desc": "Path to log file"},

+         "log_level": {"type": str, "default": 0, "desc": "Log level"},

+         "net_timeout": {

+             "type": int,

+             "default": 120,

+             "desc": "Global network timeout for read/write operations, in seconds.",

+         },

+         "net_retry_interval": {

+             "type": int,

+             "default": 30,

+             "desc": "Global network retry interval for read/write operations, in seconds.",

+         },

+         "arches": {

+             "type": list,

+             "default": ["x86_64"],

+             "desc": "Compose architectures.",

+         },

+         "pungi_koji": {

+             "type": str,

+             "default": "pungi-koji",

+             "desc": "Name or full-path to pungi-koji binary.",

+         },

+         "pungi_config_validate": {

+             "type": str,

+             "default": "",

+             "desc": "Name or full-path to pungi-config-validate binary. "

+             "If set to empty string, no validation is done.",

+         },

+         "pungi_timeout": {

+             "type": int,

+             "default": 3600,

+             "desc": "Time in seconds after which the local pungi-koji is "

+             "killed and compose is marked as failed",

+         },

+         "mergerepo_timeout": {

+             "type": int,

+             "default": 1800,

+             "desc": "Time in seconds after which the mergerepo_c is "

+             "killed and compose is marked as failed",

+         },

+         "pungi_conf_path": {

+             "type": str,

+             "default": "/etc/odcs/pungi.conf",

+             "desc": "Full path to the pungi.conf jinja2 template.",

+         },

+         "target_dir": {

+             "type": str,

+             "default": "/tmp",

+             "desc": "Path to target dir to which store composes",

+         },

+         "target_dir_url": {

+             "type": str,

+             "default": "http://localhost/odcs",

+             "desc": "Public facing URL to target_dir.",

+         },

+         "extra_target_dirs": {

+             "type": dict,

+             "default": {},

+             "desc": "Extra target_dirs to optionally store the compose on "

+             "instead of the conf.target_dir. Key is the name "

+             "of volume, value if path to target_dir.",

+         },

+         "seconds_to_live": {

+             "type": int,

+             "default": 24 * 60 * 60,

+             "desc": "Default number of seconds for which the compose is available.",

+         },

+         "max_seconds_to_live": {

+             "type": int,

+             "default": 72 * 60 * 60,

+             "desc": "Max number of seconds for which the compose is available.",

+         },

+         "mbs_url": {

+             "type": str,

+             "default": "http://localhost/module-build-service",

+             "desc": "URL to MSB API.",

+         },

+         "num_concurrent_pungi": {

+             "type": int,

+             "default": 2,

+             "desc": "Number of concurrent Pungi processes.",

+         },

+         "allowed_source_types": {

+             "type": list,

+             "default": ["tag", "module", "build"],

+             "desc": "Allowed source types.",

+         },

+         "allowed_flags": {

+             "type": list,

+             "default": COMPOSE_FLAGS.keys(),

+             "desc": "Allowed compose flags.",

+         },

+         "allowed_arches": {

+             "type": list,

+             "default": ["x86_64"],

+             "desc": "Allowed compose arches.",

+         },

+         "allowed_results": {

+             "type": list,

+             "default": COMPOSE_RESULTS,

+             "desc": "Allowed compose results.",

+         },

+         "allowed_sources": {"type": list, "default": [""], "desc": "Allowed sources."},

+         "allowed_compose_types": {

+             "type": list,

+             "default": [""],

+             "desc": "Allowed compose types.",

+         },

+         "auth_ldap_server": {

+             "type": str,

+             "default": "",

+             "desc": "Server URL to query user's groups.",

+         },

+         "auth_ldap_group_base": {

+             "type": str,

+             "default": "",

+             "desc": "Group base to query user's groups from LDAP server.",

+         },

+         "allowed_clients": {

+             "type": dict,

+             "default": {"groups": {}, "users": {}},

+             "desc": "Groups and users that are allowed to generate composes.",

+         },

+         "admins": {

+             "type": dict,

+             "default": {"groups": [], "users": []},

+             "desc": "Admin groups and users.",

+         },

+         "auth_backend": {

+             "type": str,

+             "default": "",

+             "desc": "Select which authentication backend is enabled and work "

+             "with frond-end authentication together.",

+         },

+         "auth_openidc_userinfo_uri": {

+             "type": str,

+             "default": "",

+             "desc": "UserInfo endpoint to get user information from FAS.",

+         },

+         "auth_openidc_required_scopes": {

+             "type": list,

+             "default": [],

+             "desc": "Required scopes for submitting request to run new compose.",

+         },

+         "base_module_names": {

+             "type": set,

+             "default": set(["platform", "bootstrap"]),

+             "desc": (

+                 "Set of module names which defines the product version "

+                 "(by their stream) of modules depending on them."

+             ),

+         },

+         "messaging_backend": {

+             "type": str,

+             "default": "",

+             "desc": "Messaging backend, rhmsg or fedora-messaging.",

+         },

+         "messaging_broker_urls": {

+             "type": list,

+             "default": [],

+             "desc": "List of messaging broker URLs.",

+         },

+         "messaging_cert_file": {

+             "type": str,

+             "default": "",

+             "desc": "Path to certificate file used to authenticate ODCS by broker.",

+         },

+         "messaging_key_file": {

+             "type": str,

+             "default": "",

+             "desc": "Path to private key file used to authenticate ODCS by broker.",

+         },

+         "messaging_ca_cert": {

+             "type": str,

+             "default": "",

+             "desc": "Path to trusted CA certificate bundle.",

+         },

+         "messaging_topic_prefix": {

+             "type": str,

+             "default": "",

+             "desc": "Prefix for MESSAGING_TOPIC and INTERNAL_MESSAGING_TOPIC.",

+         },

+         "messaging_topic": {

+             "type": str,

+             "default": "",

+             "desc": "Messaging topic to which messages are sent.",

+         },

+         "internal_messaging_topic": {

+             "type": str,

+             "default": "",

+             "desc": "Messaging topic to which internal ODCS-only messages are sent.",

+         },

+         "oidc_base_namespace": {

+             "type": str,

+             "default": "https://pagure.io/odcs/",

+             "desc": "Base namespace of OIDC scopes.",

+         },

+         "sigkeys": {

+             "type": list,

+             "default": [],

+             "desc": "Default list of sigkeys. Any package in a compose must "

+             "be signed by one of those keys. Can be overriden in a "

+             "compose request.",

+         },

+         "pulp_server_url": {"type": str, "default": "", "desc": "Server URL of Pulp."},

+         "pulp_username": {

+             "type": str,

+             "default": "",

+             "desc": "Username to login Pulp.",

+         },

+         "pulp_password": {

+             "type": str,

+             "default": "",

+             "desc": "Password to login Pulp.",

+         },

+         "koji_config": {"type": str, "default": None, "desc": "Koji config file."},

+         "koji_profile": {"type": str, "default": None, "desc": "Koji config profile."},

+         "koji_krb_ccache": {

+             "type": str,

+             "default": None,

+             "desc": "Kerberos ccache file to use for Koji auth.",

+         },

+         "koji_krb_keytab": {

+             "type": str,

+             "default": None,

+             "desc": "Kerberos keytab to use for Koji auth.",

+         },

+         "koji_krb_principal": {

+             "type": str,

+             "default": None,

+             "desc": "Kerberos principal to use for Koji auth.",

+         },

+         "koji_tag_cache_cleanup_timeout": {

+             "type": int,

+             "default": 30,

+             "desc": "Number of days after which the cached Koji tag data "

+             'stored in the "koji_tag_cache" directory will be '

+             "removed.",

+         },

+         "raw_config_urls": {

+             "type": dict,

+             "default": {},

+             "desc": 'URLs to get the raw Pungi config from for "raw_config" '

+             'source_type. Key is the name of the raw_config "source", '

+             "value is the URL in which the %s string is replaced with, "

+             '"commit_hash" value.',

+         },

+         "raw_config_wrapper_conf_path": {

+             "type": str,

+             "default": "/etc/odcs/raw_config_wrapper.conf",

+             "desc": "Full path to the raw_config_wrapper.conf configuration "

+             "file. This file holds Pungi configuration which should "

+             "import real pungi configuration from raw_config.conf "

+             "in order to override some variables.",

+         },

+         "raw_config_schema_override": {

+             "type": str,

+             "default": "",

+             "desc": "Full path to the JSON file defining Pungi config schema "

+             'override. This file is passed to "pungi-config-validate" '

+             "using the --schema-override option and can influence "

+             "the raw_config validation.",

+         },

+         "pungi_koji_args": {

+             "type": list,

+             "default": [],

+             "desc": "Command line arguments used to construct pungi-koji " "command.",

+         },

+         "raw_config_pungi_koji_args": {

+             "type": dict,

+             "default": {},

+             "desc": "Command line argument for raw_config source type, which "

+             "overwrite arguments listed PUNGI_KOJI_ARGS.",

+         },

+         "celery_config": {

+             "type": dict,

+             "default": {},

+             "desc": "Configuration dict to pass to Celery.",

+         },

+         "celery_broker_url": {"type": str, "default": "", "desc": "Celery broker URL"},

+         "celery_pulp_composes_queue": {

+             "type": str,

+             "default": "pulp_composes",

+             "desc": "Name of the Celery queue for Pulp composes.",

+         },

+         "celery_pungi_composes_queue": {

+             "type": str,

+             "default": "pungi_composes",

+             "desc": "Name of the Celery queue for Pungi composes.",

+         },

+         "celery_cleanup_queue": {

+             "type": str,

+             "default": "cleanup",

+             "desc": "Name of the Celery queue for cleanup task.",

+         },

+         "celery_router_config": {

+             "type": dict,

+             "default": {

                  "routing_rules": {

                      "odcs.server.celery_tasks.generate_pungi_compose": {

-                         "pungi_composes": {

-                             "source_type": 3,

-                         },

+                         "pungi_composes": {"source_type": 3},

                      },

                      "odcs.server.celery_tasks.generate_pulp_compose": {

-                         "pulp_composes": {

-                             "source_type": 4,

-                         },

+                         "pulp_composes": {"source_type": 4},

                      },

                  },

                  "cleanup_task": "odcs.server.celery_tasks.run_cleanup",

                  "default_queue": "pungi_composes",

              },

-             'desc': 'Configuration for custom celery router.'

+             "desc": "Configuration for custom celery router.",

          },

-         'runroot_extra_mounts': {

-             'type': list,

-             'default': [],

-             'desc': 'Extra mountpoint directories for odcs-mock-runroot.'

+         "runroot_extra_mounts": {

+             "type": list,

+             "default": [],

+             "desc": "Extra mountpoint directories for odcs-mock-runroot.",

          },

      }
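
For reviewers, the default celery_router_config above maps a task name plus compose properties to a queue. A minimal sketch of that routing logic, using the default rules verbatim (the pick_queue helper is illustrative, not ODCS's actual router):

# A sketch of how the default celery_router_config above routes tasks.
# pick_queue() is illustrative; ODCS's real router lives elsewhere.
rules = {
    "odcs.server.celery_tasks.generate_pungi_compose": {
        "pungi_composes": {"source_type": 3},
    },
    "odcs.server.celery_tasks.generate_pulp_compose": {
        "pulp_composes": {"source_type": 4},
    },
}
default_queue = "pungi_composes"


def pick_queue(task_name, compose):
    for queue, props in rules.get(task_name, {}).items():
        if all(compose.get(k) == v for k, v in props.items()):
            return queue
    return default_queue


print(pick_queue("odcs.server.celery_tasks.generate_pulp_compose", {"source_type": 4}))
# -> pulp_composes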

  
@@ -404,7 +429,7 @@ 

          # read items from conf and set

          for key in dir(conf_section_obj):

              # skip keys starting with underscore

-             if key.startswith('_'):

+             if key.startswith("_"):

                  continue

              # set item (lower key)

              self.set_item(key.lower(), getattr(conf_section_obj, key))
@@ -413,17 +438,17 @@ 

          for name, values in self._defaults.items():

              if hasattr(self, name):

                  continue

-             self.set_item(name, values['default'])

+             self.set_item(name, values["default"])

  

          # Used by Flask-Login to disable the @login_required decorator

-         self.login_disabled = self.auth_backend == 'noauth'

+         self.login_disabled = self.auth_backend == "noauth"

  

      def set_item(self, key, value):

          """

          Set value for configuration item. Creates the self._key = value

          attribute and self.key property to set/get/del the attribute.

          """

-         if key == 'set_item' or key.startswith('_'):

+         if key == "set_item" or key.startswith("_"):

              raise Exception("Configuration item's name is not allowed: %s" % key)

  

          # Create the empty self._key attribute, so we can assign to it.
@@ -431,7 +456,7 @@ 

  

          # Create self.key property to access the self._key attribute.

          # Use the setifok_func if available for the attribute.

-         setifok_func = '_setifok_{}'.format(key)

+         setifok_func = "_setifok_{}".format(key)

          if hasattr(self, setifok_func):

              setx = lambda self, val: getattr(self, setifok_func)(val)

          else:
@@ -443,17 +468,22 @@ 

          # managed/registered configuration items

          if key in self._defaults:

              # type conversion for configuration item

-             convert = self._defaults[key]['type']

+             convert = self._defaults[key]["type"]

              if convert in [bool, int, list, str, set, dict, float]:

                  try:

                      # Do no try to convert None...

                      if value is not None:

                          value = convert(value)

                  except Exception:

-                     raise TypeError("Configuration value conversion failed for name: %s" % key)

+                     raise TypeError(

+                         "Configuration value conversion failed for name: %s" % key

+                     )

              # unknown type/unsupported conversion

              elif convert is not None:

-                 raise TypeError("Unsupported type %s for configuration item name: %s" % (convert, key))

+                 raise TypeError(

+                     "Unsupported type %s for configuration item name: %s"

+                     % (convert, key)

+                 )

  

          # Set the attribute to the correct value

          setattr(self, key, value)
@@ -473,12 +503,14 @@ 

              for user, user_dict in role_dict.items():

                  if type(user_dict) != dict:

                      raise TypeError(

-                         "allowed_clients['%s']['%s'] is not a dict" % (role, user))

+                         "allowed_clients['%s']['%s'] is not a dict" % (role, user)

+                     )

                  for key, value in user_dict.items():

                      if type(value) not in [set, list]:

                          raise ValueError(

                              "allowed_clients['%s']['%s']['%s'] is not a "

-                             "list" % (role, user, key))

+                             "list" % (role, user, key)

+                         )

          self._allowed_clients = clients

  

      def _setifok_raw_config_urls(self, raw_config_urls):
@@ -490,8 +522,8 @@ 

              for key in ["url", "config_filename"]:

                  if key not in url_data:

                      raise ValueError(

-                         "raw_config_urls['%s']['%s'] is not defined."

-                         % (name, key))

+                         "raw_config_urls['%s']['%s'] is not defined." % (name, key)

+                     )

          self._raw_config_urls = raw_config_urls

  

      def _setifok_log_backend(self, s):
@@ -515,7 +547,9 @@ 

          if not os.path.isabs(s):

              raise ValueError("Compose target dir is not an absolute path: %s" % s)

          if not (os.path.exists(s) and os.path.isdir(s)):

-             raise ValueError("Compose target dir doesn't exist or not a directory: %s" % s)

+             raise ValueError(

+                 "Compose target dir doesn't exist or not a directory: %s" % s

+             )

          self._target_dir = s

  

      def _setifok_pungi_conf_path(self, s):
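
To make the set_item conversion hunks above easier to review, here is a minimal, self-contained sketch of the _defaults registration pattern; the dict shape and the conversion step mirror the code, while the MiniConfig class itself is hypothetical:

# Minimal sketch of the _defaults + set_item() pattern; MiniConfig is
# hypothetical, the conversion step mirrors the hunk above.
class MiniConfig(object):
    _defaults = {
        "pungi_timeout": {"type": int, "default": 3600, "desc": "..."},
        "sigkeys": {"type": list, "default": [], "desc": "..."},
    }

    def __init__(self):
        for name, values in self._defaults.items():
            self.set_item(name, values["default"])

    def set_item(self, key, value):
        convert = self._defaults[key]["type"]
        if convert in [bool, int, list, str, set, dict, float]:
            # Same conversion step as in the hunk above: "7200" -> 7200 etc.
            if value is not None:
                value = convert(value)
        setattr(self, key, value)


conf = MiniConfig()
conf.set_item("pungi_timeout", "7200")  # the str is converted via int()
assert conf.pungi_timeout == 7200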

file modified
+12 -11
@@ -38,11 +38,14 @@ 

  

      from odcs.server.models import Compose

  

-     composes = (item for item in (session.new | session.dirty)

-                 if isinstance(item, Compose))

+     composes = (

+         item for item in (session.new | session.dirty) if isinstance(item, Compose)

+     )

      composes_state_changed = (

-         compose for compose in composes

-         if not attributes.get_history(compose, 'state').unchanged)

+         compose

+         for compose in composes

+         if not attributes.get_history(compose, "state").unchanged

+     )

  

      with _cache_lock:

          for comp in composes_state_changed:
@@ -50,8 +53,9 @@ 

                  _cached_composes[comp.id] = []

              _cached_composes[comp.id].append(comp.json())

  

-     log.debug('Cached composes to be sent due to state changed: %s',

-               _cached_composes.keys())

+     log.debug(

+         "Cached composes to be sent due to state changed: %s", _cached_composes.keys()

+     )

  

  

  def start_to_publish_messages(session):
@@ -62,11 +66,8 @@ 

          msgs = []

          for compose_jsons in _cached_composes.values():

              for compose_json in compose_jsons:

-                 msgs.append({

-                     'event': 'state-changed',

-                     'compose': compose_json,

-                 })

-         log.debug('Sending messages: %s', msgs)

+                 msgs.append({"event": "state-changed", "compose": compose_json})

+         log.debug("Sending messages: %s", msgs)

          if msgs:

              try:

                  messaging.publish(msgs)

file modified
+4 -3
@@ -71,7 +71,7 @@ 

      """

      Initializes logging according to configuration file.

      """

-     log_format = '%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s'

+     log_format = "%(asctime)s - %(threadName)s - %(name)s - %(levelname)s - %(message)s"

      log_backend = conf.log_backend

  

      if not log_backend or len(log_backend) == 0 or log_backend == "console":
@@ -89,6 +89,7 @@ 

          log.propagate = False

          log.addHandler(journal.JournalHandler())

      else:

-         logging.basicConfig(filename=conf.log_file, level=conf.log_level,

-                             format=log_format)

+         logging.basicConfig(

+             filename=conf.log_file, level=conf.log_level, format=log_format

+         )

          log = logging.getLogger()

file modified
+35 -36
@@ -31,29 +31,34 @@ 

  

  

  manager = Manager(app)

- help_args = ('-?', '--help')

+ help_args = ("-?", "--help")

  manager.help_args = help_args

- migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),

-                               'migrations')

+ migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), "migrations")

  migrate = flask_migrate.Migrate(app, db, directory=migrations_dir)

- manager.add_command('db', flask_migrate.MigrateCommand)

+ manager.add_command("db", flask_migrate.MigrateCommand)

  

  

  def console_script_help(f):

      @wraps(f)

      def wrapped(*args, **kwargs):

          import sys

+ 

          if any([arg in help_args for arg in sys.argv[1:]]):

              command = os.path.basename(sys.argv[0])

-             print("""{0}

+             print(

+                 """{0}

  

  Usage: {0} [{1}]

  

  See also:

-   odcs-manager(1)""".format(command, '|'.join(help_args)))

+   odcs-manager(1)""".format(

+                     command, "|".join(help_args)

+                 )

+             )

              sys.exit(2)

          r = f(*args, **kwargs)

          return r

+ 

      return wrapped
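
For reference, the help/exit behavior of the console_script_help decorator reindented above boils down to this pattern (a runnable toy; the usage text is simplified relative to the real one):

# Toy version of the console_script_help pattern above.
import sys
from functools import wraps

help_args = ("-?", "--help")


def console_script_help(f):
    @wraps(f)
    def wrapped(*args, **kwargs):
        if any(arg in help_args for arg in sys.argv[1:]):
            print("Usage: {0} [{1}]".format(sys.argv[0], "|".join(help_args)))
            sys.exit(2)
        return f(*args, **kwargs)

    return wrapped


@console_script_help
def upgradedb():
    print("upgrading db")


upgradedb()  # prints usage and exits 2 only when -? / --help is on argv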

  

  
@@ -62,9 +67,9 @@ 

          return None

      # First, do some validation of the configuration

      attributes = (

-         'ssl_certificate_file',

-         'ssl_certificate_key_file',

-         'ssl_ca_certificate_file',

+         "ssl_certificate_file",

+         "ssl_certificate_key_file",

+         "ssl_ca_certificate_file",

      )

  

      for attribute in attributes:
@@ -76,8 +81,7 @@ 

  

      # Then, establish the ssl context and return it

      ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)

-     ssl_ctx.load_cert_chain(conf.ssl_certificate_file,

-                             conf.ssl_certificate_key_file)

+     ssl_ctx.load_cert_chain(conf.ssl_certificate_file, conf.ssl_certificate_key_file)

      ssl_ctx.verify_mode = ssl.CERT_OPTIONAL

      ssl_ctx.load_verify_locations(cafile=conf.ssl_ca_certificate_file)

      return ssl_ctx
@@ -88,9 +92,10 @@ 

  def upgradedb():

      """ Upgrades the database schema to the latest revision

      """

-     app.config["SERVER_NAME"] = 'localhost'

-     migrations_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),

-                                   'migrations')

+     app.config["SERVER_NAME"] = "localhost"

+     migrations_dir = os.path.join(

+         os.path.abspath(os.path.dirname(__file__)), "migrations"

+     )

      with app.app_context():

          flask_migrate.upgrade(directory=migrations_dir)

  
@@ -111,21 +116,21 @@ 

      """ Creates a public/private key pair for message signing and the frontend

      """

      from OpenSSL import crypto

+ 

      cert_key = crypto.PKey()

      cert_key.generate_key(crypto.TYPE_RSA, 2048)

  

-     with open(conf.ssl_certificate_key_file, 'w') as cert_key_file:

+     with open(conf.ssl_certificate_key_file, "w") as cert_key_file:

          os.chmod(conf.ssl_certificate_key_file, 0o600)

-         cert_key_file.write(

-             crypto.dump_privatekey(crypto.FILETYPE_PEM, cert_key))

+         cert_key_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, cert_key))

  

      cert = crypto.X509()

      msg_cert_subject = cert.get_subject()

-     msg_cert_subject.C = 'US'

-     msg_cert_subject.ST = 'MA'

-     msg_cert_subject.L = 'Boston'

-     msg_cert_subject.O = 'Development'  # noqa

-     msg_cert_subject.CN = 'localhost'

+     msg_cert_subject.C = "US"

+     msg_cert_subject.ST = "MA"

+     msg_cert_subject.L = "Boston"

+     msg_cert_subject.O = "Development"  # noqa

+     msg_cert_subject.CN = "localhost"

      cert.set_serial_number(2)

      cert.gmtime_adj_notBefore(0)

      cert.gmtime_adj_notAfter(315360000)  # 10 years
@@ -133,16 +138,15 @@ 

      cert.set_pubkey(cert_key)

      cert_extensions = [

          crypto.X509Extension(

-             'keyUsage', True,

-             'digitalSignature, keyEncipherment, nonRepudiation'),

-         crypto.X509Extension('extendedKeyUsage', True, 'serverAuth'),

+             "keyUsage", True, "digitalSignature, keyEncipherment, nonRepudiation"

+         ),

+         crypto.X509Extension("extendedKeyUsage", True, "serverAuth"),

      ]

      cert.add_extensions(cert_extensions)

-     cert.sign(cert_key, 'sha256')

+     cert.sign(cert_key, "sha256")

  

-     with open(conf.ssl_certificate_file, 'w') as cert_file:

-         cert_file.write(

-             crypto.dump_certificate(crypto.FILETYPE_PEM, cert))

+     with open(conf.ssl_certificate_file, "w") as cert_file:

+         cert_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))

  

  

  @console_script_help
@@ -150,15 +154,10 @@ 

  def runssl(host=conf.host, port=conf.port, debug=conf.debug):

      """ Runs the Flask app with the HTTPS settings configured in config.py

      """

-     logging.info('Starting ODCS frontend')

+     logging.info("Starting ODCS frontend")

  

      ssl_ctx = _establish_ssl_context()

-     app.run(

-         host=host,

-         port=port,

-         ssl_context=ssl_ctx,

-         debug=debug

-     )

+     app.run(host=host, port=port, ssl_context=ssl_ctx, debug=debug)

  

  

  def manager_wrapper():

file modified
+30 -18
@@ -27,7 +27,8 @@ 

  from odcs.server import log

  

  import gi

- gi.require_version('Modulemd', '2.0')

+ 

+ gi.require_version("Modulemd", "2.0")

  from gi.repository import Modulemd  # noqa: E402

  

  
@@ -39,7 +40,7 @@ 

      def __init__(self, config):

          self.mbs_url = config.mbs_url.rstrip("/")

  

-     @retry(wait_on=(requests.ConnectionError, ), logger=log)

+     @retry(wait_on=(requests.ConnectionError,), logger=log)

      def get_modules(self, **params):

          url = self.mbs_url + "/1/module-builds/"

          r = requests.get(url, params=params)
@@ -73,14 +74,15 @@ 

              # we need to remove the "-devel" suffix from the NSVC.

              n = nsvc.split(":")[0]

              if n.endswith("-devel"):

-                 params["nsvc"] = n[:-len("-devel")] + params["nsvc"][len(n):]

+                 params["nsvc"] = n[: -len("-devel")] + params["nsvc"][len(n) :]

                  modules = self.get_modules(**params)

                  devel_module = True

  

          if not modules["meta"]["total"]:

              state_msg = "ready or done" if include_done else "ready"

              raise ModuleLookupError(

-                 "Failed to find module %s in %s state in the MBS." % (nsvc, state_msg))

+                 "Failed to find module %s in %s state in the MBS." % (nsvc, state_msg)

+             )

  

          ret = []

          # In case the nsvc is just "name:stream", there might be multiple
@@ -99,13 +101,14 @@ 

                  module["name"] += "-devel"

                  # Devel module always depend on the non-devel version

                  mmd = Modulemd.ModuleStream.read_string(

-                     module['modulemd'], strict=True, module_name=None, module_stream=None

+                     module["modulemd"],

+                     strict=True,

+                     module_name=None,

+                     module_stream=None,

                  )

                  mmd = mmd.upgrade(2)

                  for dep in mmd.get_dependencies():

-                     dep.add_runtime_stream(

-                         mmd.get_module_name(), mmd.get_stream_name()

-                     )

+                     dep.add_runtime_stream(mmd.get_module_name(), mmd.get_stream_name())

                  mod_index = Modulemd.ModuleIndex.new()

                  mod_index.add_module_stream(mmd)

                  module["modulemd"] = to_text_type(mod_index.dump_to_string())
@@ -125,7 +128,7 @@ 

          new_modules = []

          for module in modules:

              mmd = Modulemd.ModuleStream.read_string(

-                 module['modulemd'], strict=True, module_name=None, module_stream=None

+                 module["modulemd"], strict=True, module_name=None, module_stream=None

              )

              mmd = mmd.upgrade(2)

  
@@ -167,7 +170,7 @@ 

          module_map = defaultdict(list)

  

          for module in modules:

-             key = "%s:%s" % (module['name'], module['stream'])

+             key = "%s:%s" % (module["name"], module["stream"])

  

              # In case this is the first module with this name:stream,

              # just add it to new_modules.
@@ -179,17 +182,24 @@ 

  

              # Check if there is already this module in new_modules, but in

              # different version. If so, raise an exception.

-             if module['version'] != old_modules[0]['version']:

+             if module["version"] != old_modules[0]["version"]:

                  raise ModuleLookupError(

-                     "%s:%s:%s:%s conflicts with %s:%s:%s:%s" % (

-                         module['name'], module["stream"], module["version"],

-                         module["context"], old_modules[0]['name'],

-                         old_modules[0]["stream"], old_modules[0]["version"],

-                         old_modules[0]["context"]))

+                     "%s:%s:%s:%s conflicts with %s:%s:%s:%s"

+                     % (

+                         module["name"],

+                         module["stream"],

+                         module["version"],

+                         module["context"],

+                         old_modules[0]["name"],

+                         old_modules[0]["stream"],

+                         old_modules[0]["version"],

+                         old_modules[0]["context"],

+                     )

+                 )

  

              # Check if there is already this module in new_modules in the very

              # same context - do not add it there, because it would be duplicate.

-             if module['context'] in [m["context"] for m in old_modules]:

+             if module["context"] in [m["context"] for m in old_modules]:

                  continue

  

              # Add it to new_modules/module_map.
@@ -199,7 +209,9 @@ 

          if expand:

              added_module_list = new_modules

              while True:

-                 added_module_list = self._add_new_dependencies(module_map, added_module_list)

+                 added_module_list = self._add_new_dependencies(

+                     module_map, added_module_list

+                 )

                  if len(added_module_list) == 0:

                      break

                  new_modules.extend(added_module_list)

file modified
+16 -10
@@ -39,7 +39,7 @@ 

      def __init__(self, compose):

          self.compose = compose

  

-     @retry(wait_on=(requests.ConnectionError, ), logger=log)

+     @retry(wait_on=(requests.ConnectionError,), logger=log)

      def _download_file(self, path, url):

          """

          Downloads repodata file, stores it into `path`/repodata and returns
@@ -91,8 +91,7 @@ 

          # the existing repodata to save lot of time downloading other repodata

          # files.

          if repomd == last_repomd:

-             log.info("%r: Reusing cached repodata for %s",

-                      self.compose, baseurl)

+             log.info("%r: Reusing cached repodata for %s", self.compose, baseurl)

              return

  

          # In case the repomd.xml changed, remove everything from the repodata
@@ -106,7 +105,7 @@ 

          # to merge the repos.

          ns = "{http://linux.duke.edu/metadata/repo}"

          with ThreadPoolExecutor(5) as downloader:

-             for data in tree.findall('%sdata' % ns):

+             for data in tree.findall("%sdata" % ns):

                  if data.get("type").endswith("_db"):

                      continue

                  data_location = data.find("%slocation" % ns).get("href")
@@ -146,7 +145,10 @@ 

          # Generate the pulp_repo_cache structure and locks for each repo.

          for repo in repos:

              repo_path = os.path.join(

-                 self.compose.target_dir, "pulp_repo_cache", repo.replace(repo_prefix, ""))

+                 self.compose.target_dir,

+                 "pulp_repo_cache",

+                 repo.replace(repo_prefix, ""),

+             )

              repo_paths.append(repo_path)

              makedirs(repo_path)

  
@@ -161,16 +163,20 @@ 

                  self._download_repodata(repo_path, repo)

  

              log.info("%r: Starting mergerepo_c: %r", self.compose, repo_paths)

-             mergerepo_exe = find_executable('mergerepo_c')

+             mergerepo_exe = find_executable("mergerepo_c")

              if not mergerepo_exe:

                  raise RuntimeError("mergerepo_c is not available on system")

  

-             result_repo_dir = os.path.join(self.compose.result_repo_dir, repo_name, arch)

+             result_repo_dir = os.path.join(

+                 self.compose.result_repo_dir, repo_name, arch

+             )

              makedirs(result_repo_dir)

  

-             args = [mergerepo_exe, "--method", "nvr", "-o",

-                     result_repo_dir]

-             args += ["--repo-prefix-search", os.path.join(self.compose.target_dir, "pulp_repo_cache")]

+             args = [mergerepo_exe, "--method", "nvr", "-o", result_repo_dir]

+             args += [

+                 "--repo-prefix-search",

+                 os.path.join(self.compose.target_dir, "pulp_repo_cache"),

+             ]

              args += ["--repo-prefix-replace", repo_prefix]

              for repo in repo_paths:

                  args.append("-r")

file modified
+10 -10
@@ -28,7 +28,7 @@ 

  

  log = getLogger(__name__)

  

- __all__ = ('publish',)

+ __all__ = ("publish",)

  

  

  def publish(msgs):
@@ -45,10 +45,10 @@ 

      from rhmsg.activemq.producer import AMQProducer

  

      config = {

-         'urls': conf.messaging_broker_urls,

-         'certificate': conf.messaging_cert_file,

-         'private_key': conf.messaging_key_file,

-         'trusted_certificates': conf.messaging_ca_cert,

+         "urls": conf.messaging_broker_urls,

+         "certificate": conf.messaging_cert_file,

+         "private_key": conf.messaging_key_file,

+         "trusted_certificates": conf.messaging_ca_cert,

      }

      with AMQProducer(**config) as producer:

          producer.through_topic(conf.messaging_topic)
@@ -62,23 +62,23 @@ 

  def _fedora_messaging_send_msg(msgs):

      """Send message to fedora-messaging."""

      from fedora_messaging import api, config

+ 

      config.conf.setup_logging()

  

      for msg in msgs:

          # "event" is typically just "state-changed"

-         event = msg.get('event', 'event')

+         event = msg.get("event", "event")

          topic = "odcs.compose.%s" % event

  

          api.publish(api.Message(topic=topic, body=msg))

  

  

  def _get_messaging_backend():

-     if conf.messaging_backend == 'rhmsg':

+     if conf.messaging_backend == "rhmsg":

          return _umb_send_msg

-     elif conf.messaging_backend == 'fedora-messaging':

+     elif conf.messaging_backend == "fedora-messaging":

          return _fedora_messaging_send_msg

      elif conf.messaging_backend:

-         raise ValueError(

-             'Unknown messaging backend {0}'.format(conf.messaging_backend))

+         raise ValueError("Unknown messaging backend {0}".format(conf.messaging_backend))

      else:

          return None

file modified
+11 -6
@@ -45,14 +45,15 @@ 

  

  

  class ComposesCollector(object):

- 

      def composes_total(self):

          """

          Returns `composes_total` GaugeMetricFamily with number of composes

          for each state and source_type.

          """

          counter = GaugeMetricFamily(

-             "composes_total", "Total number of composes", labels=["source_type", "state"]

+             "composes_total",

+             "Total number of composes",

+             labels=["source_type", "state"],

          )

          for state in COMPOSE_STATES:

              for source_type in PUNGI_SOURCE_TYPE_NAMES:
@@ -78,11 +79,15 @@ 

          """

          counter = CounterMetricFamily(

              "raw_config_composes_count",

-             "Total number of raw_config composes per source", labels=["source"]

+             "Total number of raw_config composes per source",

+             labels=["source"],

+         )

+         composes = (

+             Compose.query.with_entities(Compose.source, func.count(Compose.source))

+             .filter(Compose.source_type == PUNGI_SOURCE_TYPE_NAMES["raw_config"])

+             .group_by(Compose.source)

+             .all()

          )

-         composes = Compose.query.with_entities(Compose.source, func.count(Compose.source)).filter(

-             Compose.source_type == PUNGI_SOURCE_TYPE_NAMES["raw_config"]

-         ).group_by(Compose.source).all()

  

          sources = {}

          for source, count in composes:

@@ -11,16 +11,18 @@ 

  # Interpret the config file for Python logging.

  # This line sets up loggers basically.

  fileConfig(config.config_file_name)

- logger = logging.getLogger('alembic.env')

+ logger = logging.getLogger("alembic.env")

  

  # add your model's MetaData object here

  # for 'autogenerate' support

  # from myapp import mymodel

  # target_metadata = mymodel.Base.metadata

  from flask import current_app

- config.set_main_option('sqlalchemy.url',

-                        current_app.config.get('SQLALCHEMY_DATABASE_URI'))

- target_metadata = current_app.extensions['migrate'].db.metadata

+ 

+ config.set_main_option(

+     "sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI")

+ )

+ target_metadata = current_app.extensions["migrate"].db.metadata

  

  # other values from the config, defined by the needs of env.py,

  # can be acquired:
@@ -59,21 +61,25 @@ 

      # when there are no changes to the schema

      # reference: http://alembic.readthedocs.org/en/latest/cookbook.html

      def process_revision_directives(context, revision, directives):

-         if getattr(config.cmd_opts, 'autogenerate', False):

+         if getattr(config.cmd_opts, "autogenerate", False):

              script = directives[0]

              if script.upgrade_ops.is_empty():

                  directives[:] = []

-                 logger.info('No changes in schema detected.')

+                 logger.info("No changes in schema detected.")

  

-     engine = engine_from_config(config.get_section(config.config_ini_section),

-                                 prefix='sqlalchemy.',

-                                 poolclass=pool.NullPool)

+     engine = engine_from_config(

+         config.get_section(config.config_ini_section),

+         prefix="sqlalchemy.",

+         poolclass=pool.NullPool,

+     )

  

      connection = engine.connect()

-     context.configure(connection=connection,

-                       target_metadata=target_metadata,

-                       process_revision_directives=process_revision_directives,

-                       **current_app.extensions['migrate'].configure_args)

+     context.configure(

+         connection=connection,

+         target_metadata=target_metadata,

+         process_revision_directives=process_revision_directives,

+         **current_app.extensions["migrate"].configure_args

+     )

  

      try:

          with context.begin_transaction():
@@ -81,6 +87,7 @@ 

      finally:

          connection.close()

  

+ 

  if context.is_offline_mode():

      run_migrations_offline()

  else:
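
One behavior worth calling out in the env.py hunks above: the process_revision_directives hook drops empty autogenerate revisions. A stripped-down illustration (the directives objects are faked, and the hook's real signature is (context, revision, directives); only the emptiness check matches the code):

# Faked Alembic directives to illustrate the empty-revision guard above.
class FakeOps(object):
    def __init__(self, empty):
        self._empty = empty

    def is_empty(self):
        return self._empty


class FakeScript(object):
    def __init__(self, empty):
        self.upgrade_ops = FakeOps(empty)


def process_revision_directives(directives, autogenerate=True):
    if autogenerate:
        script = directives[0]
        if script.upgrade_ops.is_empty():
            directives[:] = []
            print("No changes in schema detected.")


directives = [FakeScript(empty=True)]
process_revision_directives(directives)
assert directives == []  # the empty revision was discarded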

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '0571a5ca58a0'

- down_revision = '0d4d8e1cfe29'

+ revision = "0571a5ca58a0"

+ down_revision = "0d4d8e1cfe29"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,11 +16,11 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.create_index(op.f('ix_composes_state'), 'composes', ['state'], unique=False)

+     op.create_index(op.f("ix_composes_state"), "composes", ["state"], unique=False)

      # ### end Alembic commands ###

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_index(op.f('ix_composes_state'), table_name='composes')

+     op.drop_index(op.f("ix_composes_state"), table_name="composes")

      # ### end Alembic commands ###

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '0d4d8e1cfe29'

- down_revision = '566733ac3811'

+ revision = "0d4d8e1cfe29"

+ down_revision = "566733ac3811"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,16 +16,17 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.create_table('users',

-     sa.Column('id', sa.Integer(), nullable=False),

-     sa.Column('username', sa.String(length=200), nullable=False),

-     sa.PrimaryKeyConstraint('id'),

-     sa.UniqueConstraint('username')

+     op.create_table(

+         "users",

+         sa.Column("id", sa.Integer(), nullable=False),

+         sa.Column("username", sa.String(length=200), nullable=False),

+         sa.PrimaryKeyConstraint("id"),

+         sa.UniqueConstraint("username"),

      )

      # ### end Alembic commands ###

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_table('users')

+     op.drop_table("users")

      # ### end Alembic commands ###

@@ -7,16 +7,18 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '11b350234051'

- down_revision = 'a8e259e0208c'

+ revision = "11b350234051"

+ down_revision = "a8e259e0208c"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('arches', sa.String(), nullable=True, default="x86_64"))

+     op.add_column(

+         "composes", sa.Column("arches", sa.String(), nullable=True, default="x86_64")

+     )

  

  

  def downgrade():

-     op.drop_column('composes', 'arches')

+     op.drop_column("composes", "arches")

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '3b92820da295'

- down_revision = '0571a5ca58a0'

+ revision = "3b92820da295"

+ down_revision = "0571a5ca58a0"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,11 +16,13 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.create_index(op.f('ix_composes_time_to_expire'), 'composes', ['time_to_expire'], unique=False)

+     op.create_index(

+         op.f("ix_composes_time_to_expire"), "composes", ["time_to_expire"], unique=False

+     )

      # ### end Alembic commands ###

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_index(op.f('ix_composes_time_to_expire'), table_name='composes')

+     op.drop_index(op.f("ix_composes_time_to_expire"), table_name="composes")

      # ### end Alembic commands ###

@@ -7,16 +7,16 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '4514febd31fa'

- down_revision = 'd1da07e15c54'

+ revision = "4514febd31fa"

+ down_revision = "d1da07e15c54"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('builds', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("builds", sa.String(), nullable=True))

  

  

  def downgrade():

-     op.drop_column('composes', 'builds')

+     op.drop_column("composes", "builds")

@@ -7,7 +7,7 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '566733ac3811'

+ revision = "566733ac3811"

  down_revision = None

  

  from alembic import op
@@ -16,27 +16,28 @@ 

  

  def upgrade():

      ### commands auto generated by Alembic - please adjust! ###

-     op.create_table('composes',

-     sa.Column('id', sa.Integer(), nullable=False),

-     sa.Column('owner', sa.String(), nullable=False),

-     sa.Column('source_type', sa.Integer(), nullable=False),

-     sa.Column('source', sa.String(), nullable=False),

-     sa.Column('koji_event', sa.Integer(), nullable=True),

-     sa.Column('state', sa.Integer(), nullable=False),

-     sa.Column('results', sa.Integer(), nullable=False),

-     sa.Column('packages', sa.String(), nullable=True),

-     sa.Column('flags', sa.Integer(), nullable=True),

-     sa.Column('reused_id', sa.Integer(), nullable=True),

-     sa.Column('time_to_expire', sa.DateTime(), nullable=False),

-     sa.Column('time_submitted', sa.DateTime(), nullable=False),

-     sa.Column('time_done', sa.DateTime(), nullable=True),

-     sa.Column('time_removed', sa.DateTime(), nullable=True),

-     sa.PrimaryKeyConstraint('id')

+     op.create_table(

+         "composes",

+         sa.Column("id", sa.Integer(), nullable=False),

+         sa.Column("owner", sa.String(), nullable=False),

+         sa.Column("source_type", sa.Integer(), nullable=False),

+         sa.Column("source", sa.String(), nullable=False),

+         sa.Column("koji_event", sa.Integer(), nullable=True),

+         sa.Column("state", sa.Integer(), nullable=False),

+         sa.Column("results", sa.Integer(), nullable=False),

+         sa.Column("packages", sa.String(), nullable=True),

+         sa.Column("flags", sa.Integer(), nullable=True),

+         sa.Column("reused_id", sa.Integer(), nullable=True),

+         sa.Column("time_to_expire", sa.DateTime(), nullable=False),

+         sa.Column("time_submitted", sa.DateTime(), nullable=False),

+         sa.Column("time_done", sa.DateTime(), nullable=True),

+         sa.Column("time_removed", sa.DateTime(), nullable=True),

+         sa.PrimaryKeyConstraint("id"),

      )

      ### end Alembic commands ###

  

  

  def downgrade():

      ### commands auto generated by Alembic - please adjust! ###

-     op.drop_table('composes')

+     op.drop_table("composes")

      ### end Alembic commands ###

@@ -7,16 +7,16 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '812f2745248f'

- down_revision = 'a855c39e2a0f'

+ revision = "812f2745248f"

+ down_revision = "a855c39e2a0f"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('target_dir', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("target_dir", sa.String(), nullable=True))

  

  

  def downgrade():

-     op.drop_column('composes', 'target_dir')

+     op.drop_column("composes", "target_dir")

@@ -7,16 +7,18 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = '82172e6a3154'

- down_revision = 'cd0781bbdab1'

+ revision = "82172e6a3154"

+ down_revision = "cd0781bbdab1"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('pungi_config_dump', sa.String(), nullable=True))

+     op.add_column(

+         "composes", sa.Column("pungi_config_dump", sa.String(), nullable=True)

+     )

  

  

  def downgrade():

-     op.drop_column('composes', 'pungi_config_dump')

+     op.drop_column("composes", "pungi_config_dump")

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'a855c39e2a0f'

- down_revision = '82172e6a3154'

+ revision = "a855c39e2a0f"

+ down_revision = "82172e6a3154"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,11 +16,11 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.add_column('composes', sa.Column('celery_task_id', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("celery_task_id", sa.String(), nullable=True))

      # ### end Alembic commands ###

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_column('composes', 'celery_task_id')

+     op.drop_column("composes", "celery_task_id")

      # ### end Alembic commands ###

@@ -7,16 +7,16 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'a8e259e0208c'

- down_revision = 'e2163db7b15d'

+ revision = "a8e259e0208c"

+ down_revision = "e2163db7b15d"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('state_reason', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("state_reason", sa.String(), nullable=True))

  

  

  def downgrade():

-     op.drop_column('composes', 'state_reason')

+     op.drop_column("composes", "state_reason")

@@ -7,16 +7,16 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'b2725d046624'

- down_revision = '4514febd31fa'

+ revision = "b2725d046624"

+ down_revision = "4514febd31fa"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('lookaside_repos', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("lookaside_repos", sa.String(), nullable=True))

  

  

  def downgrade():

-     op.drop_column('composes', 'lookaside_repos')

+     op.drop_column("composes", "lookaside_repos")

@@ -7,16 +7,16 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'b75ad2afc207'

- down_revision = 'c370b90de998'

+ revision = "b75ad2afc207"

+ down_revision = "c370b90de998"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('sigkeys', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("sigkeys", sa.String(), nullable=True))

  

  

  def downgrade():

-     op.drop_column('composes', 'sigkeys')

+     op.drop_column("composes", "sigkeys")

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'c370b90de998'

- down_revision = 'f24a36cc8a16'

+ revision = "c370b90de998"

+ down_revision = "f24a36cc8a16"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,11 +16,11 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.create_index('idx_source_type__state', 'composes', ['source_type', 'state'])

+     op.create_index("idx_source_type__state", "composes", ["source_type", "state"])

      # ### end Alembic commands ###

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_index('idx_source_type__state', table_name='composes')

+     op.drop_index("idx_source_type__state", table_name="composes")

      # ### end Alembic commands ###

@@ -7,20 +7,20 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'cd0781bbdab1'

- down_revision = 'de0a86d7de49'

+ revision = "cd0781bbdab1"

+ down_revision = "de0a86d7de49"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('compose_type', sa.String(), nullable=True))

-     op.add_column('composes', sa.Column('label', sa.String(), nullable=True))

-     op.add_column('composes', sa.Column('pungi_compose_id', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("compose_type", sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("label", sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("pungi_compose_id", sa.String(), nullable=True))

  

  

  def downgrade():

-     op.drop_column('composes', 'pungi_compose_id')

-     op.drop_column('composes', 'label')

-     op.drop_column('composes', 'compose_type')

+     op.drop_column("composes", "pungi_compose_id")

+     op.drop_column("composes", "label")

+     op.drop_column("composes", "compose_type")

@@ -7,20 +7,22 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'd1da07e15c54'

- down_revision = 'f4bc999818d7'

+ revision = "d1da07e15c54"

+ down_revision = "f4bc999818d7"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('multilib_arches', sa.String(), nullable=True,

-                                         default=""))

-     op.add_column('composes', sa.Column('multilib_method', sa.Integer(), nullable=True,

-                                         default=0))

+     op.add_column(

+         "composes", sa.Column("multilib_arches", sa.String(), nullable=True, default="")

+     )

+     op.add_column(

+         "composes", sa.Column("multilib_method", sa.Integer(), nullable=True, default=0)

+     )

  

  

  def downgrade():

-     op.drop_column('composes', 'multilib_method')

-     op.drop_column('composes', 'multilib_arches')

+     op.drop_column("composes", "multilib_method")

+     op.drop_column("composes", "multilib_arches")

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'de0a86d7de49'

- down_revision = 'e186faabdafe'

+ revision = "de0a86d7de49"

+ down_revision = "e186faabdafe"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,13 +16,15 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.add_column('composes', sa.Column('time_started', sa.DateTime(), nullable=True))

+     op.add_column("composes", sa.Column("time_started", sa.DateTime(), nullable=True))

      # ### end Alembic commands ###

      # Set the start time for all composes that are not waiting.

-     op.execute("UPDATE composes SET time_started = time_submitted WHERE time_started IS NULL AND state != 0")

+     op.execute(

+         "UPDATE composes SET time_started = time_submitted WHERE time_started IS NULL AND state != 0"

+     )

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_column('composes', 'time_started')

+     op.drop_column("composes", "time_started")

      # ### end Alembic commands ###

@@ -7,18 +7,22 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'e186faabdafe'

- down_revision = 'b2725d046624'

+ revision = "e186faabdafe"

+ down_revision = "b2725d046624"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('modular_koji_tags', sa.String(), nullable=True))

-     op.add_column('composes', sa.Column('module_defaults_url', sa.String(), nullable=True))

+     op.add_column(

+         "composes", sa.Column("modular_koji_tags", sa.String(), nullable=True)

+     )

+     op.add_column(

+         "composes", sa.Column("module_defaults_url", sa.String(), nullable=True)

+     )

  

  

  def downgrade():

-     op.drop_column('composes', 'module_defaults_url')

-     op.drop_column('composes', 'modular_koji_tags')

+     op.drop_column("composes", "module_defaults_url")

+     op.drop_column("composes", "modular_koji_tags")

@@ -7,18 +7,20 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'e2163db7b15d'

- down_revision = 'b75ad2afc207'

+ revision = "e2163db7b15d"

+ down_revision = "b75ad2afc207"

  

  from alembic import op

  import sqlalchemy as sa

  

  

  def upgrade():

-     op.add_column('composes', sa.Column('koji_task_id', sa.Integer(), nullable=True))

-     op.create_index(op.f('ix_composes_koji_task_id'), 'composes', ['koji_task_id'], unique=False)

+     op.add_column("composes", sa.Column("koji_task_id", sa.Integer(), nullable=True))

+     op.create_index(

+         op.f("ix_composes_koji_task_id"), "composes", ["koji_task_id"], unique=False

+     )

  

  

  def downgrade():

-     op.drop_index(op.f('ix_composes_koji_task_id'), table_name='composes')

-     op.drop_column('composes', 'koji_task_id')

+     op.drop_index(op.f("ix_composes_koji_task_id"), table_name="composes")

+     op.drop_column("composes", "koji_task_id")

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'f24a36cc8a16'

- down_revision = '3b92820da295'

+ revision = "f24a36cc8a16"

+ down_revision = "3b92820da295"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,11 +16,13 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.create_index(op.f('ix_composes_reused_id'), 'composes', ['reused_id'], unique=False)

+     op.create_index(

+         op.f("ix_composes_reused_id"), "composes", ["reused_id"], unique=False

+     )

      # ### end Alembic commands ###

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_index(op.f('ix_composes_reused_id'), table_name='composes')

+     op.drop_index(op.f("ix_composes_reused_id"), table_name="composes")

      # ### end Alembic commands ###

@@ -7,8 +7,8 @@ 

  """

  

  # revision identifiers, used by Alembic.

- revision = 'f4bc999818d7'

- down_revision = '11b350234051'

+ revision = "f4bc999818d7"

+ down_revision = "11b350234051"

  

  from alembic import op

  import sqlalchemy as sa
@@ -16,11 +16,11 @@ 

  

  def upgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.add_column('composes', sa.Column('removed_by', sa.String(), nullable=True))

+     op.add_column("composes", sa.Column("removed_by", sa.String(), nullable=True))

      # ### end Alembic commands ###

  

  

  def downgrade():

      # ### commands auto generated by Alembic - please adjust! ###

-     op.drop_column('composes', 'removed_by')

+     op.drop_column("composes", "removed_by")

      # ### end Alembic commands ###

@@ -249,7 +249,8 @@ 

  

      # Generate the Mock configuration using the standard Koji way.

      output = koji_module.genMockConfig(

-         runroot_key, arch, repoid=repo["id"], tag_name=tag_name, **opts)

+         runroot_key, arch, repoid=repo["id"], tag_name=tag_name, **opts

+     )

  

      # Write the Mock configuration to /tmp/`runroot_key`/mock.cfg.

      mock_cfg_path = os.path.join(runroot_tmp_path(runroot_key), "mock.cfg")
@@ -279,7 +280,8 @@ 

          if c != "-" and not c.isalnum():

              raise ValueError(

                  "Unexpected character '%s' in the runroot key \"%s\"."

-                 % (c, runroot_key))

+                 % (c, runroot_key)

+             )

  

  

  def mock_runroot_install(runroot_key, packages):
@@ -316,7 +318,7 @@ 

  

          # Wrap the runroot command in /bin/sh, because that's how Koji does

          # that and we need to stay compatible with this way...

-         sh_wrapper = ['/bin/sh', '-c', "{ %s; }" % (" ".join(cmd))]

+         sh_wrapper = ["/bin/sh", "-c", "{ %s; }" % (" ".join(cmd))]

  

          # Run the command in Mock chroot. We need to use the `--old-chroot`

          # here, otherwise Lorax fails.
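
To make the `/bin/sh` wrapper above concrete, this is what the reformatted line builds for a sample command (the command itself is hypothetical):

```python
# Hypothetical command; the wrapper shape matches the line reformatted above.
cmd = ["lorax", "--version"]
sh_wrapper = ["/bin/sh", "-c", "{ %s; }" % (" ".join(cmd))]
print(sh_wrapper)  # ['/bin/sh', '-c', '{ lorax --version; }']
```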

file modified
+102 -71
@@ -37,17 +37,18 @@ 

  from odcs.server.events import cache_composes_if_state_changed

  from odcs.server.events import start_to_publish_messages

  from odcs.common.types import (

-     COMPOSE_STATES, INVERSE_COMPOSE_STATES, COMPOSE_FLAGS,

-     COMPOSE_RESULTS)

+     COMPOSE_STATES,

+     INVERSE_COMPOSE_STATES,

+     COMPOSE_FLAGS,

+     COMPOSE_RESULTS,

+ )

  

  from sqlalchemy import event, or_

  from flask_sqlalchemy import SignallingSession

  

- event.listen(SignallingSession, 'after_flush',

-              cache_composes_if_state_changed)

+ event.listen(SignallingSession, "after_flush", cache_composes_if_state_changed)

  

- event.listen(SignallingSession, 'after_commit',

-              start_to_publish_messages)

+ event.listen(SignallingSession, "after_commit", start_to_publish_messages)

  

  

  def commit_on_success(func):
@@ -59,6 +60,7 @@ 

              raise

          finally:

              db.session.commit()

+ 

      return _decorator

  

  
@@ -69,7 +71,7 @@ 

  class User(ODCSBase, UserMixin):

      """User information table"""

  

-     __tablename__ = 'users'

+     __tablename__ = "users"

  

      id = db.Column(db.Integer, primary_key=True)

      username = db.Column(db.String(200), nullable=False, unique=True)
@@ -184,12 +186,29 @@ 

          self._target_dir = value

  

      @classmethod

-     def create(cls, session, owner, source_type, source, results,

-                seconds_to_live, packages=None, flags=0, sigkeys=None,

-                koji_event=None, arches=None, multilib_arches=None,

-                multilib_method=None, builds=None, lookaside_repos=None,

-                modular_koji_tags=None, module_defaults_url=None,

-                label=None, compose_type=None, target_dir=None):

+     def create(

+         cls,

+         session,

+         owner,

+         source_type,

+         source,

+         results,

+         seconds_to_live,

+         packages=None,

+         flags=0,

+         sigkeys=None,

+         koji_event=None,

+         arches=None,

+         multilib_arches=None,

+         multilib_method=None,

+         builds=None,

+         lookaside_repos=None,

+         modular_koji_tags=None,

+         module_defaults_url=None,

+         label=None,

+         compose_type=None,

+         target_dir=None,

+     ):

          now = datetime.utcnow()

          compose = cls(

              owner=owner,
@@ -218,7 +237,9 @@ 

          return compose

  

      @classmethod

-     def create_copy(cls, session, compose, owner=None, seconds_to_live=None, sigkeys=None):

+     def create_copy(

+         cls, session, compose, owner=None, seconds_to_live=None, sigkeys=None

+     ):

          """

          Creates new compose with all the options influencing the resulting

          compose copied from the `compose`. The `owner` and `seconds_to_live`
@@ -294,16 +315,18 @@ 

          if not self.on_default_target_dir:

              return ""

  

-         return conf.target_dir_url + "/" \

-             + os.path.join(self.name, "compose", "Temporary")

+         return (

+             conf.target_dir_url + "/" + os.path.join(self.name, "compose", "Temporary")

+         )

  

      @property

      def result_repofile_path(self):

          """

          Returns path to .repo file.

          """

-         return os.path.join(self.toplevel_dir, "compose", "Temporary",

-                             self.name + ".repo")

+         return os.path.join(

+             self.toplevel_dir, "compose", "Temporary", self.name + ".repo"

+         )

  

      @property

      def result_repofile_url(self):
@@ -313,11 +336,13 @@ 

          if not self.on_default_target_dir:

              return ""

  

-         return conf.target_dir_url + "/" \

-             + os.path.join(self.name, "compose", "Temporary",

-                            self.name + ".repo")

+         return (

+             conf.target_dir_url

+             + "/"

+             + os.path.join(self.name, "compose", "Temporary", self.name + ".repo")

+         )

  

-     @validates('state')

+     @validates("state")

      def validate_state(self, key, field):

          if field in COMPOSE_STATES.values():

              return field
@@ -343,43 +368,45 @@ 

          if self.on_default_target_dir:

              target_dir = "default"

          else:

-             inverse_extra_target_dirs = {v: k for k, v in conf.extra_target_dirs.items()}

+             inverse_extra_target_dirs = {

+                 v: k for k, v in conf.extra_target_dirs.items()

+             }

              target_dir = inverse_extra_target_dirs.get(self.target_dir, "unknown")

  

          ret = {

-             'id': self.id,

-             'owner': self.owner,

-             'source_type': self.source_type,

-             'source': self.source,

-             'state': self.state,

-             'state_name': INVERSE_COMPOSE_STATES[self.state],

-             'state_reason': self.state_reason,

-             'time_to_expire': self._utc_datetime_to_iso(self.time_to_expire),

-             'time_submitted': self._utc_datetime_to_iso(self.time_submitted),

-             'time_started': self._utc_datetime_to_iso(self.time_started),

-             'time_done': self._utc_datetime_to_iso(self.time_done),

-             'time_removed': self._utc_datetime_to_iso(self.time_removed),

-             'removed_by': self.removed_by,

-             'result_repo': self.result_repo_url,

-             'result_repofile': self.result_repofile_url,

-             'toplevel_url': self.toplevel_url,

-             'flags': flags,

-             'results': results,

-             'sigkeys': self.sigkeys if self.sigkeys else "",

-             'koji_event': self.koji_event,

-             'koji_task_id': self.koji_task_id,

-             'packages': self.packages,

-             'builds': self.builds,

-             'arches': self.arches,

-             'multilib_arches': self.multilib_arches,

-             'multilib_method': self.multilib_method,

-             'lookaside_repos': self.lookaside_repos,

-             'modular_koji_tags': self.modular_koji_tags,

-             'module_defaults_url': self.module_defaults_url,

-             'label': self.label,

-             'compose_type': self.compose_type,

-             'pungi_compose_id': self.pungi_compose_id,

-             'target_dir': target_dir,

+             "id": self.id,

+             "owner": self.owner,

+             "source_type": self.source_type,

+             "source": self.source,

+             "state": self.state,

+             "state_name": INVERSE_COMPOSE_STATES[self.state],

+             "state_reason": self.state_reason,

+             "time_to_expire": self._utc_datetime_to_iso(self.time_to_expire),

+             "time_submitted": self._utc_datetime_to_iso(self.time_submitted),

+             "time_started": self._utc_datetime_to_iso(self.time_started),

+             "time_done": self._utc_datetime_to_iso(self.time_done),

+             "time_removed": self._utc_datetime_to_iso(self.time_removed),

+             "removed_by": self.removed_by,

+             "result_repo": self.result_repo_url,

+             "result_repofile": self.result_repofile_url,

+             "toplevel_url": self.toplevel_url,

+             "flags": flags,

+             "results": results,

+             "sigkeys": self.sigkeys if self.sigkeys else "",

+             "koji_event": self.koji_event,

+             "koji_task_id": self.koji_task_id,

+             "packages": self.packages,

+             "builds": self.builds,

+             "arches": self.arches,

+             "multilib_arches": self.multilib_arches,

+             "multilib_method": self.multilib_method,

+             "lookaside_repos": self.lookaside_repos,

+             "modular_koji_tags": self.modular_koji_tags,

+             "module_defaults_url": self.module_defaults_url,

+             "label": self.label,

+             "compose_type": self.compose_type,

+             "pungi_compose_id": self.pungi_compose_id,

+             "target_dir": target_dir,

          }

  

          if full:
@@ -404,29 +431,33 @@ 

      def composes_to_expire(cls):

          now = datetime.utcnow()

          return Compose.query.filter(

-             or_(Compose.state == COMPOSE_STATES["done"],

-                 Compose.state == COMPOSE_STATES["failed"]),

-             Compose.time_to_expire < now).all()

+             or_(

+                 Compose.state == COMPOSE_STATES["done"],

+                 Compose.state == COMPOSE_STATES["failed"],

+             ),

+             Compose.time_to_expire < now,

+         ).all()

  

      def __repr__(self):

          return "<Compose %r, type %r, state %s>" % (

-             self.id, self.source_type,

-             INVERSE_COMPOSE_STATES[self.state])

+             self.id,

+             self.source_type,

+             INVERSE_COMPOSE_STATES[self.state],

+         )

  

      def get_reused_compose(self):

          """Get compose this compose reuses"""

-         return db.session.query(Compose).filter(

-             Compose.id == self.reused_id).first()

+         return db.session.query(Compose).filter(Compose.id == self.reused_id).first()

  

      def get_reusing_composes(self):

          """Get composes that are reusing this compose"""

-         return db.session.query(Compose).filter(

-             Compose.reused_id == self.id).all()

+         return db.session.query(Compose).filter(Compose.reused_id == self.id).all()

  

      def extend_expiration(self, _from, seconds_to_live):

          """Extend time to expire"""

-         new_expiration = max(self.time_to_expire,

-                              _from + timedelta(seconds=seconds_to_live))

+         new_expiration = max(

+             self.time_to_expire, _from + timedelta(seconds=seconds_to_live)

+         )

          if new_expiration != self.time_to_expire:

              self.time_to_expire = new_expiration

  
@@ -440,11 +471,11 @@ 

          """

          self.state = to_state

          self.state_reason = reason

-         if to_state == COMPOSE_STATES['removed']:

+         if to_state == COMPOSE_STATES["removed"]:

              self.time_removed = happen_on or datetime.utcnow()

-         elif to_state == COMPOSE_STATES['done']:

+         elif to_state == COMPOSE_STATES["done"]:

              self.time_done = happen_on or datetime.utcnow()

-         elif to_state == COMPOSE_STATES['generating']:

+         elif to_state == COMPOSE_STATES["generating"]:

              self.time_started = happen_on or datetime.utcnow()

          if to_state in (COMPOSE_STATES["done"], COMPOSE_STATES["failed"]):

              ttl = self.time_to_expire - self.time_submitted
@@ -452,4 +483,4 @@ 

          db.session.commit()

  

  

- Index('idx_source_type__state', Compose.source_type, Compose.state)

+ Index("idx_source_type__state", Compose.source_type, Compose.state)

file modified
+11 -10
@@ -32,29 +32,30 @@ 

  

  

  class ReverseProxy(object):

-     '''Wrap the application in this middleware and configure the

+     """Wrap the application in this middleware and configure the

      front-end server to add these headers, to let you quietly bind

      this to a URL other than / and to an HTTP scheme that is

      different than what is used locally.

  

      :param app: the WSGI application

-     '''

+     """

+ 

      def __init__(self, app):

          self.app = app

  

      def __call__(self, environ, start_response):

-         script_name = environ.get('HTTP_X_SCRIPT_NAME', '')

+         script_name = environ.get("HTTP_X_SCRIPT_NAME", "")

          if script_name:

-             environ['SCRIPT_NAME'] = script_name

-             path_info = environ['PATH_INFO']

+             environ["SCRIPT_NAME"] = script_name

+             path_info = environ["PATH_INFO"]

              if path_info.startswith(script_name):

-                 environ['PATH_INFO'] = path_info[len(script_name):]

+                 environ["PATH_INFO"] = path_info[len(script_name) :]

  

-         server = environ.get('HTTP_X_FORWARDED_HOST', '')

+         server = environ.get("HTTP_X_FORWARDED_HOST", "")

          if server:

-             environ['HTTP_HOST'] = server

+             environ["HTTP_HOST"] = server

  

-         scheme = environ.get('HTTP_X_SCHEME', '')

+         scheme = environ.get("HTTP_X_SCHEME", "")

          if scheme:

-             environ['wsgi.url_scheme'] = scheme

+             environ["wsgi.url_scheme"] = scheme

          return self.app(environ, start_response)
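
The docstring above says to wrap the application in this middleware; a hedged wiring sketch (the Flask app object here is an assumption, not part of this PR):

```python
# Assumption: a Flask application; ReverseProxy is the class defined above.
from flask import Flask

app = Flask(__name__)
app.wsgi_app = ReverseProxy(app.wsgi_app)
# The front-end proxy then supplies X-Script-Name, X-Forwarded-Host and
# X-Scheme headers, which __call__() above maps onto the WSGI environ.
```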

file modified
+27 -24
@@ -40,14 +40,14 @@ 

          self.password = password

          self.server_url = server_url

          self.compose = compose

-         self.rest_api_root = '{0}/pulp/api/v2/'.format(self.server_url.rstrip('/'))

+         self.rest_api_root = "{0}/pulp/api/v2/".format(self.server_url.rstrip("/"))

  

      @retry(wait_on=requests.exceptions.RequestException)

      def _rest_post(self, endpoint, post_data):

          query_data = json.dumps(post_data)

          try:

              r = requests.post(

-                 '{0}{1}'.format(self.rest_api_root, endpoint.lstrip('/')),

+                 "{0}{1}".format(self.rest_api_root, endpoint.lstrip("/")),

                  query_data,

                  auth=(self.username, self.password),

                  timeout=conf.net_timeout,
@@ -89,14 +89,14 @@ 

                  # single arch in Pulp, but be defensive.

                  raise ValueError(

                      "Content set repository %s does not have exactly 1 arch: "

-                     "%r." % (repo["url"], repo["arches"]))

+                     "%r." % (repo["url"], repo["arches"])

+                 )

              url = repo["url"].replace(list(repo["arches"])[0], "$basearch")

              if first_repo is None:

                  first_repo = copy.deepcopy(repo)

                  first_repo["url"] = url

                  continue

-             if (first_repo["url"] != url or

-                     first_repo["sigkeys"] != repo["sigkeys"]):

+             if first_repo["url"] != url or first_repo["sigkeys"] != repo["sigkeys"]:

                  return {}

              first_repo["arches"] = first_repo["arches"].union(repo["arches"])

          return first_repo
@@ -123,7 +123,8 @@ 

                  # single arch in Pulp, but be defensive.

                  raise ValueError(

                      "Content set repository %s does not have exactly 1 arch: "

-                     "%r." % (repo["url"], repo["arches"]))

+                     "%r." % (repo["url"], repo["arches"])

+                 )

              arch = list(repo["arches"])[0]

              if arch not in per_arch_repos:

                  per_arch_repos[arch] = []
@@ -141,8 +142,9 @@ 

              "sigkeys": content_set_repos[0]["sigkeys"],

          }

  

-     def get_repos_from_content_sets(self, content_sets,

-                                     include_unpublished_repos=False):

+     def get_repos_from_content_sets(

+         self, content_sets, include_unpublished_repos=False

+     ):

          """

          Returns dictionary with URLs of all shipped repositories defined by

          the content_sets.
@@ -162,37 +164,38 @@ 

              }

          """

          query_data = {

-             'criteria': {

-                 'filters': {

-                     'notes.content_set': {'$in': content_sets},

-                 },

-                 'fields': ['notes'],

+             "criteria": {

+                 "filters": {"notes.content_set": {"$in": content_sets}},

+                 "fields": ["notes"],

              }

          }

  

          if not include_unpublished_repos:

-             query_data['criteria']['filters']['notes.include_in_download_service'] = 'True'

-         repos = self._rest_post('repositories/search/', query_data)

+             query_data["criteria"]["filters"][

+                 "notes.include_in_download_service"

+             ] = "True"

+         repos = self._rest_post("repositories/search/", query_data)

  

          per_content_set_repos = {}

          for repo in repos:

              notes = repo["notes"]

-             url = "%s/%s" % (self.server_url.rstrip('/'),

-                              notes['relative_url'])

+             url = "%s/%s" % (self.server_url.rstrip("/"), notes["relative_url"])

              arch = notes["arch"]

              sigkeys = sorted(notes["signatures"].split(","))

              # OSBS cannot verify https during the container image build, so

              # fallback to http for now.

              if url.startswith("https://"):

-                 url = "http://" + url[len("https://"):]

+                 url = "http://" + url[len("https://") :]

              if notes["content_set"] not in per_content_set_repos:

                  per_content_set_repos[notes["content_set"]] = []

-             per_content_set_repos[notes["content_set"]].append({

-                 "url": url,

-                 "arches": set([arch]),

-                 "sigkeys": sigkeys,

-                 "product_versions": notes["product_versions"],

-             })

+             per_content_set_repos[notes["content_set"]].append(

+                 {

+                     "url": url,

+                     "arches": set([arch]),

+                     "sigkeys": sigkeys,

+                     "product_versions": notes["product_versions"],

+                 }

+             )

  

          ret = {}

          for cs, repos in per_content_set_repos.items():
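
For reference, the search call above POSTs a criteria document shaped like the following (the content-set name is hypothetical; the shape mirrors `query_data` as built in the hunk):

```python
# Hypothetical content set; "notes.include_in_download_service" is only added
# when unpublished repositories are excluded, as in the code above.
query_data = {
    "criteria": {
        "filters": {
            "notes.content_set": {"$in": ["rhel-8-for-x86_64-baseos-rpms"]},
            "notes.include_in_download_service": "True",
        },
        "fields": ["notes"],
    }
}
```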

file modified
+92 -49
@@ -36,13 +36,16 @@ 

  from odcs.server import comps

  from odcs.server.models import Compose

  from odcs.common.types import (

-     PungiSourceType, COMPOSE_RESULTS, MULTILIB_METHODS,

-     INVERSE_PUNGI_SOURCE_TYPE_NAMES, COMPOSE_FLAGS)

+     PungiSourceType,

+     COMPOSE_RESULTS,

+     MULTILIB_METHODS,

+     INVERSE_PUNGI_SOURCE_TYPE_NAMES,

+     COMPOSE_FLAGS,

+ )

  from odcs.server.utils import makedirs, clone_repo, copytree

  

  

  class BasePungiConfig(object):

- 

      def __init__(self):

          self.pungi_timeout = conf.pungi_timeout

  
@@ -54,13 +57,12 @@ 

          :param str cfg: Configuration to write.

          """

          with open(path, "w") as f:

-             log.info("Writing %s configuration to %s.",

-                      os.path.basename(path), path)

+             log.info("Writing %s configuration to %s.", os.path.basename(path), path)

              f.write(cfg)

  

      def write_config_files(self, topdir):

          """Write configuration into files"""

-         raise NotImplementedError('Concrete config object must implement.')

+         raise NotImplementedError("Concrete config object must implement.")

  

      def validate(self, topdir, compose_dir):

          """Validate configuration. Raises an exception of error found."""
@@ -68,7 +70,6 @@ 

  

  

  class RawPungiConfig(BasePungiConfig):

- 

      def __init__(self, compose_source):

          super(RawPungiConfig, self).__init__()

          source_name, source_hash = compose_source.split("#")
@@ -82,7 +83,8 @@ 

          self.pungi_timeout = url_data.get("pungi_timeout", conf.pungi_timeout)

          self.pungi_cfg = url_data

          self.pungi_koji_args = conf.raw_config_pungi_koji_args.get(

-             source_name, conf.pungi_koji_args)

+             source_name, conf.pungi_koji_args

+         )

  

      def write_config_files(self, topdir):

          """Write raw config files
@@ -96,15 +98,15 @@ 

          # and override some variables.

          if conf.raw_config_wrapper_conf_path:

              main_cfg_path = os.path.join(topdir, "raw_config.conf")

-             shutil.copy2(conf.raw_config_wrapper_conf_path,

-                          os.path.join(topdir, "pungi.conf"))

+             shutil.copy2(

+                 conf.raw_config_wrapper_conf_path, os.path.join(topdir, "pungi.conf")

+             )

          else:

              main_cfg_path = os.path.join(topdir, "pungi.conf")

  

          # Clone the git repo with raw_config pungi config files.

          repo_dir = os.path.join(topdir, "raw_config_repo")

-         clone_repo(self.pungi_cfg["url"], repo_dir,

-                    commit=self.pungi_cfg["commit"])

+         clone_repo(self.pungi_cfg["url"], repo_dir, commit=self.pungi_cfg["commit"])

  

          # If the 'path' is defined, copy only the files form the 'path'

          # to topdir.
@@ -126,12 +128,16 @@ 

          # Apply global schema override.

          if conf.raw_config_schema_override:

              pungi_config_validate_cmd += [

-                 "--schema-override", conf.raw_config_schema_override]

+                 "--schema-override",

+                 conf.raw_config_schema_override,

+             ]

  

          # Apply raw_config specific schema override.

          if "schema_override" in self.pungi_cfg:

              pungi_config_validate_cmd += [

-                 "--schema-override", self.pungi_cfg["schema_override"]]

+                 "--schema-override",

+                 self.pungi_cfg["schema_override"],

+             ]

  

          # Add raw_config configuration file to validate.

          pungi_config_validate_cmd.append(os.path.join(topdir, "pungi.conf"))
@@ -143,15 +149,29 @@ 

          with open(log_out_path, "w") as log_out:

              with open(log_err_path, "w") as log_err:

                  odcs.server.utils.execute_cmd(

-                     pungi_config_validate_cmd, stdout=log_out, stderr=log_err)

+                     pungi_config_validate_cmd, stdout=log_out, stderr=log_err

+                 )

  

  

  class PungiConfig(BasePungiConfig):

-     def __init__(self, release_name, release_version, source_type, source,

-                  packages=None, arches=None, sigkeys=None, results=0,

-                  multilib_arches=None, multilib_method=0, builds=None,

-                  flags=0, lookaside_repos=None, modular_koji_tags=None,

-                  module_defaults_url=None):

+     def __init__(

+         self,

+         release_name,

+         release_version,

+         source_type,

+         source,

+         packages=None,

+         arches=None,

+         sigkeys=None,

+         results=0,

+         multilib_arches=None,

+         multilib_method=0,

+         builds=None,

+         flags=0,

+         lookaside_repos=None,

+         modular_koji_tags=None,

+         module_defaults_url=None,

+     ):

          super(PungiConfig, self).__init__()

          self.release_name = release_name

          self.release_version = release_version
@@ -187,8 +207,12 @@ 

              self.bootable = True

  

          if source_type == PungiSourceType.KOJI_TAG:

-             self.koji_module_tags = modular_koji_tags.split(" ") if modular_koji_tags else []

-             self.module_defaults_url = module_defaults_url.split(" ") if module_defaults_url else []

+             self.koji_module_tags = (

+                 modular_koji_tags.split(" ") if modular_koji_tags else []

+             )

+             self.module_defaults_url = (

+                 module_defaults_url.split(" ") if module_defaults_url else []

+             )

              self.koji_tag = source

              self.gather_source = "comps"

              if self.koji_module_tags:
@@ -199,14 +223,19 @@ 

              self.koji_tag = None

              self.gather_source = "module"

              self.gather_method = "nodeps"

-             self.module_defaults_url = module_defaults_url.split(" ") if module_defaults_url else []

+             self.module_defaults_url = (

+                 module_defaults_url.split(" ") if module_defaults_url else []

+             )

  

              if self.packages:

-                 raise ValueError("Exact packages cannot be set for MODULE "

-                                  "source type.")

-         elif source_type in [PungiSourceType.BUILD,

-                              PungiSourceType.PUNGI_COMPOSE,

-                              PungiSourceType.REPO]:

+                 raise ValueError(

+                     "Exact packages cannot be set for MODULE " "source type."

+                 )

+         elif source_type in [

+             PungiSourceType.BUILD,

+             PungiSourceType.PUNGI_COMPOSE,

+             PungiSourceType.REPO,

+         ]:

              self.gather_source = "comps"

              self.gather_method = "deps"

              self.koji_tag = None
@@ -233,14 +262,16 @@ 

      @property

      def pkgset_source(self):

          if self.source_type == PungiSourceType.REPO:

-             return 'repos'

-         return 'koji'

+             return "repos"

+         return "koji"

  

      def get_comps_config(self):

          if self.source_type == PungiSourceType.MODULE:

              return ""

          odcs_comps = comps.Comps()

-         odcs_group = comps.Group('odcs-group', 'odcs-group', 'ODCS compose default group')

+         odcs_group = comps.Group(

+             "odcs-group", "odcs-group", "ODCS compose default group"

+         )

          for package in self.packages:

              odcs_group.add_package(comps.Package(package))

          odcs_comps.add_group(odcs_group)
@@ -250,7 +281,9 @@ 

  

      def get_variants_config(self):

          odcs_product = comps.Product()

-         tmp_variant = comps.Variant('Temporary', 'Temporary', 'variant', self.source_type)

+         tmp_variant = comps.Variant(

+             "Temporary", "Temporary", "variant", self.source_type

+         )

          for arch in self.arches:

              tmp_variant.add_arch(comps.Arch(arch))

          if self.source_type == PungiSourceType.MODULE:
@@ -258,7 +291,11 @@ 

                  tmp_variant.add_module(comps.Module(module))

          elif self.source_type == PungiSourceType.KOJI_TAG:

              if self.packages:

-                 tmp_variant.add_group(comps.Group('odcs-group', 'odcs-group', 'ODCS compose default group'))

+                 tmp_variant.add_group(

+                     comps.Group(

+                         "odcs-group", "odcs-group", "ODCS compose default group"

+                     )

+                 )

              if self.koji_module_tags:

                  tmp_variant.add_module(comps.Module("*"))

  
@@ -274,8 +311,10 @@ 

              return template.render(config=self)

          except Exception as e:

              log.exception(

-                 "Failed to render pungi conf template {!r}: {}".format(conf.pungi_conf_path,

-                                                                        str(e)))

+                 "Failed to render pungi conf template {!r}: {}".format(

+                     conf.pungi_conf_path, str(e)

+                 )

+             )

  

      def write_config_files(self, topdir):

          """
@@ -309,7 +348,9 @@ 

          self._stop_event.set()

  

      def run(self):

-         p = os.path.join(self.compose.toplevel_dir, "work", "global", "composeinfo-base.json")

+         p = os.path.join(

+             self.compose.toplevel_dir, "work", "global", "composeinfo-base.json"

+         )

          while not self._stop_event.is_set():

              time.sleep(1)

  
@@ -372,7 +413,7 @@ 

          elif isinstance(self.pungi_cfg, PungiConfig):

              pungi_cmd += conf.pungi_koji_args

          else:

-             raise RuntimeError('Unknown pungi config type to handle.')

+             raise RuntimeError("Unknown pungi config type to handle.")

  

          compose_type_to_arg = {

              "test": "--test",
@@ -431,7 +472,8 @@ 

          while True:

              ci.compose.id = ci.create_compose_id()

              existing_compose = Compose.query.filter(

-                 Compose.pungi_compose_id == ci.compose.id).first()

+                 Compose.pungi_compose_id == ci.compose.id

+             ).first()

              if not existing_compose:

                  break

              ci.compose.respin += 1
@@ -480,8 +522,12 @@ 

              with open(log_out_path, "w") as log_out:

                  with open(log_err_path, "w") as log_err:

                      odcs.server.utils.execute_cmd(

-                         pungi_cmd, cwd=td, timeout=self.pungi_cfg.pungi_timeout,

-                         stdout=log_out, stderr=log_err)

+                         pungi_cmd,

+                         cwd=td,

+                         timeout=self.pungi_cfg.pungi_timeout,

+                         stdout=log_out,

+                         stderr=log_err,

+                     )

          finally:

              if compose_id_thread:

                  compose_id_thread.stop()
@@ -490,8 +536,8 @@ 

                      shutil.rmtree(td)

              except Exception as e:

                  log.warning(

-                     "Failed to remove temporary directory {!r}: {}".format(

-                         td, str(e)))

+                     "Failed to remove temporary directory {!r}: {}".format(td, str(e))

+                 )

  

      def run(self, compose):

          """
@@ -516,8 +562,7 @@ 

          toplevel_dir = self.compose.toplevel_dir

          if not toplevel_dir:

              return None

-         return os.path.join(

-             toplevel_dir, "logs", "global", "pungi.global.log")

+         return os.path.join(toplevel_dir, "logs", "global", "pungi.global.log")

  

      @property

      def config_dump_path(self):
@@ -527,8 +572,7 @@ 

          toplevel_dir = self.compose.toplevel_dir

          if not toplevel_dir:

              return None

-         return os.path.join(

-             toplevel_dir, "logs", "global", "config-dump.global.log")

+         return os.path.join(toplevel_dir, "logs", "global", "config-dump.global.log")

  

      def _get_global_log_errors(self):

          """
@@ -555,7 +599,7 @@ 

                          continue

                      if error:

                          errors.append(error)

-                     error = line[idx + len("[ERROR   ] "):]

+                     error = line[idx + len("[ERROR   ] ") :]

          except IOError:

              pass

          return errors
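
The space before the slice colon above (also visible in the proxy.py and pulp.py hunks earlier) is black following PEP 8: when a slice bound is a complex expression, the colon is treated like a binary operator and padded, with the space dropped on the side where a bound is omitted. A tiny runnable illustration:

```python
line = "2017-01-01 12:00:00 [ERROR   ] compose failed"
idx = line.find("[ERROR   ] ")
# Complex lower bound, omitted upper bound -> black writes "[idx + len(...) :]".
error = line[idx + len("[ERROR   ] ") :]
print(error)  # compose failed
```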
@@ -580,8 +624,7 @@ 

                  break

  

          if self.compose.on_default_target_dir:

-             errors = errors.replace(

-                 conf.target_dir, conf.target_dir_url)

+             errors = errors.replace(conf.target_dir, conf.target_dir_url)

          return errors

  

      def get_config_dump(self):

@@ -42,8 +42,7 @@ 

          # The `variant_name` is for example `Server`

          self.variant_name = os.path.basename(self.variant_url)

          # The `metadata_url` is for example "http://localhost/foo/compose/metadata".

-         self.metadata_url = os.path.join(

-             os.path.dirname(self.variant_url), "metadata")

+         self.metadata_url = os.path.join(os.path.dirname(self.variant_url), "metadata")

  

      def _fetch_json(self, url):

          """
@@ -77,11 +76,12 @@ 

          # for the right variant.

          url = os.path.join(self.metadata_url, "rpms.json")

          data = self._fetch_json(url)

-         srpms_per_arch = data.get("payload", {}).get("rpms", {}).get(

-             self.variant_name)

+         srpms_per_arch = data.get("payload", {}).get("rpms", {}).get(self.variant_name)

          if not srpms_per_arch:

-             raise ValueError("The %s does not contain payload -> rpms -> %s "

-                              "section" % (url, self.variant_name))

+             raise ValueError(

+                 "The %s does not contain payload -> rpms -> %s "

+                 "section" % (url, self.variant_name)

+             )

  

          # Go through the data and fill in the dict to return.

          for arch, srpms in srpms_per_arch.items():
@@ -93,7 +93,8 @@ 

                      ret["sigkeys"].add(rpm_data["sigkey"])

  

                  srpm_nvr = "{name}-{version}-{release}".format(

-                     **productmd.common.parse_nvra(srpm_nevra))

+                     **productmd.common.parse_nvra(srpm_nevra)

+                 )

                  ret["builds"][srpm_nvr] = packages

  

          return ret
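
The `srpm_nvr` construction above leans on productmd's NEVRA parsing; a hedged sketch with a made-up SRPM string:

```python
import productmd.common

# Hypothetical SRPM NEVRA string; parse_nvra() returns a dict whose "name",
# "version" and "release" keys feed the format() call in the hunk above.
nvra = productmd.common.parse_nvra("bash-4.4.19-7.el8.src")
print("{name}-{version}-{release}".format(**nvra))  # bash-4.4.19-7.el8
```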

file modified
+28 -10
@@ -43,8 +43,14 @@ 

          return s

  

  

- def retry(timeout=conf.net_timeout, interval=conf.net_retry_interval, wait_on=Exception, logger=None):

+ def retry(

+     timeout=conf.net_timeout,

+     interval=conf.net_retry_interval,

+     wait_on=Exception,

+     logger=None,

+ ):

      """A decorator that allows to retry a section of code until success or timeout."""

+ 

      def wrapper(function):

          @functools.wraps(function)

          def inner(*args, **kwargs):
@@ -54,12 +60,18 @@ 

                      return function(*args, **kwargs)

                  except wait_on as e:

                      if logger is not None:

-                         logger.warn("Exception %r raised from %r.  Retry in %rs",

-                                     e, function, interval)

+                         logger.warn(

+                             "Exception %r raised from %r.  Retry in %rs",

+                             e,

+                             function,

+                             interval,

+                         )

                      time.sleep(interval)

                      if (time.time() - start) >= timeout:

                          raise  # This re-raises the last exception.

+ 

          return inner

+ 

      return wrapper

  

  
@@ -102,8 +114,9 @@ 

      # group so we can kill the main process and also children processes in

      # case of timeout.

      log.info("Executing command: %s%s" % (args, out_log_msg))

-     proc = subprocess.Popen(args, stdout=stdout, stderr=stderr, cwd=cwd,

-                             preexec_fn=os.setsid)

+     proc = subprocess.Popen(

+         args, stdout=stdout, stderr=stderr, cwd=cwd, preexec_fn=os.setsid

+     )

  

      # Setup timer to kill whole process group if needed.

      if timeout:
@@ -123,19 +136,24 @@ 

      if timeout_expired:

          raise RuntimeError(

              "Compose has taken more time than allowed by configuration "

-             "(%d seconds)" % conf.pungi_timeout)

+             "(%d seconds)" % conf.pungi_timeout

+         )

  

      if proc.returncode != 0:

-         err_msg = "Command '%s' returned non-zero value %d%s" % (args, proc.returncode, out_log_msg)

+         err_msg = "Command '%s' returned non-zero value %d%s" % (

+             args,

+             proc.returncode,

+             out_log_msg,

+         )

          raise RuntimeError(err_msg)

  

  

- def clone_repo(url, dest, branch='master', commit=None):

-     cmd = ['git', 'clone', '-b', branch, url, dest]

+ def clone_repo(url, dest, branch="master", commit=None):

+     cmd = ["git", "clone", "-b", branch, url, dest]

      execute_cmd(cmd)

  

      if commit:

-         cmd = ['git', 'checkout', commit]

+         cmd = ["git", "checkout", commit]

          execute_cmd(cmd, cwd=dest)

  

      return dest
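
A hedged usage sketch of the `retry` decorator reformatted above (the helper function and its URL are hypothetical; `conf` and `log` are this module's existing globals):

```python
import requests

# Hypothetical helper: retried on network errors every conf.net_retry_interval
# seconds until conf.net_timeout is exceeded, logging each failed attempt.
@retry(wait_on=requests.exceptions.RequestException, logger=log)
def fetch_metadata(url):
    return requests.get(url, timeout=conf.net_timeout)
```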

file modified
+150 -131
@@ -32,68 +32,49 @@ 

  from odcs.server.errors import NotFound, Forbidden

  from odcs.server.models import Compose

  from odcs.common.types import (

-     COMPOSE_RESULTS, COMPOSE_FLAGS, COMPOSE_STATES, PUNGI_SOURCE_TYPE_NAMES,

-     PungiSourceType, MULTILIB_METHODS)

+     COMPOSE_RESULTS,

+     COMPOSE_FLAGS,

+     COMPOSE_STATES,

+     PUNGI_SOURCE_TYPE_NAMES,

+     PungiSourceType,

+     MULTILIB_METHODS,

+ )

  from odcs.server.api_utils import (

-     pagination_metadata, filter_composes, validate_json_data,

-     raise_if_input_not_allowed)

+     pagination_metadata,

+     filter_composes,

+     validate_json_data,

+     raise_if_input_not_allowed,

+ )

  from odcs.server.auth import requires_role, login_required, has_role

  from odcs.server.auth import require_scopes

  from odcs.server.metrics import registry

  

  try:

      from odcs.server.celery_tasks import celery_app, schedule_compose

+ 

      CELERY_AVAILABLE = True

  except ImportError:

-     log.exception(

-         "Cannot import celery_tasks. The Celery support is turned off.")

+     log.exception("Cannot import celery_tasks. The Celery support is turned off.")

      CELERY_AVAILABLE = False

  

  

  api_v1 = {

-     'composes': {

-         'url': '/api/1/composes/',

-         'options': {

-             'defaults': {'id': None},

-             'methods': ['GET'],

-         }

-     },

-     'compose': {

-         'url': '/api/1/composes/<int:id>',

-         'options': {

-             'methods': ['GET'],

-         }

-     },

-     'composes_post': {

-         'url': '/api/1/composes/',

-         'options': {

-             'methods': ['POST'],

-         }

-     },

-     'compose_regenerate': {

-         'url': '/api/1/composes/<int:id>',

-         'options': {

-             'methods': ['PATCH'],

-         }

-     },

-     'composes_delete': {

-         'url': '/api/1/composes/<int:id>',

-         'options': {

-             'methods': ['DELETE'],

-         }

+     "composes": {

+         "url": "/api/1/composes/",

+         "options": {"defaults": {"id": None}, "methods": ["GET"]},

      },

-     'about': {

-         'url': '/api/1/about/',

-         'options': {

-             'methods': ['GET']

-         }

+     "compose": {"url": "/api/1/composes/<int:id>", "options": {"methods": ["GET"]}},

+     "composes_post": {"url": "/api/1/composes/", "options": {"methods": ["POST"]}},

+     "compose_regenerate": {

+         "url": "/api/1/composes/<int:id>",

+         "options": {"methods": ["PATCH"]},

      },

-     'metrics': {

-         'url': '/api/1/metrics/',

-         'options': {

-             'methods': ['GET']

-         }

+     "composes_delete": {

+         "url": "/api/1/composes/<int:id>",

+         "options": {"methods": ["DELETE"]},

      },

+     "about": {"url": "/api/1/about/", "options": {"methods": ["GET"]}},

+     "metrics": {"url": "/api/1/metrics/", "options": {"methods": ["GET"]}},

  }

  

  
@@ -102,7 +83,8 @@ 

          if conf.auth_backend == "noauth":

              log.warning(

                  "Cannot determine the owner of compose, because "

-                 "'noauth' auth_backend is used.")

+                 "'noauth' auth_backend is used."

+             )

              return "unknown"

          else:

              return g.user.username
@@ -116,8 +98,7 @@ 

              try:

                  return min(int(seconds_to_live), conf.max_seconds_to_live)

              except ValueError:

-                 err = 'Invalid seconds_to_live specified in request: %s' % \

-                     request_data

+                 err = "Invalid seconds_to_live specified in request: %s" % request_data

                  log.error(err)

                  raise ValueError(err)

          else:
@@ -152,8 +133,8 @@ 

              p_query = filter_composes(request)

  

              json_data = {

-                 'meta': pagination_metadata(p_query, request.args),

-                 'items': [item.json() for item in p_query.items]

+                 "meta": pagination_metadata(p_query, request.args),

+                 "items": [item.json() for item in p_query.items],

              }

  

              return jsonify(json_data), 200
@@ -163,11 +144,11 @@ 

              if compose:

                  return jsonify(compose.json(True)), 200

              else:

-                 raise NotFound('No such compose found.')

+                 raise NotFound("No such compose found.")

  

      @login_required

-     @require_scopes('renew-compose')

-     @requires_role('allowed_clients')

+     @require_scopes("renew-compose")

+     @requires_role("allowed_clients")

      def patch(self, id):

          """ Extends the compose expiration time or regenerates expired compose.

  
@@ -191,9 +172,13 @@ 

          old_compose = Compose.query.filter(

              Compose.id == id,

              Compose.state.in_(

-                 [COMPOSE_STATES["removed"],

+                 [

+                     COMPOSE_STATES["removed"],

                      COMPOSE_STATES["done"],

-                     COMPOSE_STATES["failed"]])).first()

+                     COMPOSE_STATES["failed"],

+                 ]

+             ),

+         ).first()

  

          if not old_compose:

              err = "No compose with id %s found" % id
@@ -207,7 +192,7 @@ 

  

          sigkeys = ""

          if "sigkeys" in data:

-             sigkeys = ' '.join(data["sigkeys"])

+             sigkeys = " ".join(data["sigkeys"])

          else:

              sigkeys = old_compose.sigkeys

  
@@ -217,21 +202,28 @@ 

              raw_config_key = old_compose.source.split("#")[0]

  

          raise_if_input_not_allowed(

-             source_types=old_compose.source_type, sources=old_compose.source,

-             results=old_compose.results, flags=old_compose.flags,

-             arches=old_compose.arches, compose_types=compose_type,

-             raw_config_keys=raw_config_key)

+             source_types=old_compose.source_type,

+             sources=old_compose.source,

+             results=old_compose.results,

+             flags=old_compose.flags,

+             arches=old_compose.arches,

+             compose_types=compose_type,

+             raw_config_keys=raw_config_key,

+         )

  

          has_to_create_a_copy = (

-             old_compose.state in (COMPOSE_STATES['removed'], COMPOSE_STATES['failed']) or

-             sigkeys != old_compose.sigkeys)

+             old_compose.state in (COMPOSE_STATES["removed"], COMPOSE_STATES["failed"])

+             or sigkeys != old_compose.sigkeys

+         )

          if has_to_create_a_copy:

              log.info("%r: Going to regenerate the compose", old_compose)

-             compose = Compose.create_copy(db.session,

-                                           old_compose,

-                                           self._get_compose_owner(),

-                                           seconds_to_live,

-                                           sigkeys=sigkeys)

+             compose = Compose.create_copy(

+                 db.session,

+                 old_compose,

+                 self._get_compose_owner(),

+                 seconds_to_live,

+                 sigkeys=sigkeys,

+             )

              db.session.add(compose)

              # Flush is needed, because we use `before_commit` SQLAlchemy

              # event to send message and before_commit can be called before
@@ -248,8 +240,11 @@ 

              # time.

              extend_from = datetime.datetime.utcnow()

              old_compose.extend_expiration(extend_from, seconds_to_live)

-             log.info('Extended time_to_expire for compose %r to %s',

-                      old_compose, old_compose.time_to_expire)

+             log.info(

+                 "Extended time_to_expire for compose %r to %s",

+                 old_compose,

+                 old_compose.time_to_expire,

+             )

              # As well as extending those composes that reuse this this compose,

              # and the one this compose reuses.

              reused_compose = old_compose.get_reused_compose()
@@ -261,8 +256,8 @@ 

              return jsonify(old_compose.json()), 200

  

      @login_required

-     @require_scopes('new-compose')

-     @requires_role('allowed_clients')

+     @require_scopes("new-compose")

+     @requires_role("allowed_clients")

      def post(self):

          """ Creates new ODCS compose request.

  
@@ -290,13 +285,13 @@ 

          """

          data = request.get_json(force=True)

          if not data:

-             raise ValueError('No JSON POST data submitted')

+             raise ValueError("No JSON POST data submitted")

  

          validate_json_data(data)

  

          seconds_to_live = self._get_seconds_to_live(data)

  

-         source_data = data.get('source', None)

+         source_data = data.get("source", None)

          if not isinstance(source_data, dict):

              err = "Invalid source configuration provided: %s" % str(data)

              log.error(err)
@@ -305,7 +300,10 @@ 

          needed_keys = ["type"]

          for key in needed_keys:

              if key not in source_data:

-                 err = "Missing %s in source configuration, received: %s" % (key, str(source_data))

+                 err = "Missing %s in source configuration, received: %s" % (

+                     key,

+                     str(source_data),

+                 )

                  log.error(err)

                  raise ValueError(err)

  
@@ -332,21 +330,25 @@ 

          if source_type == PungiSourceType.RAW_CONFIG:

              if len(source) > 1:

                  raise ValueError(

-                     'Only single source is allowed for "raw_config" '

-                     'source_type')

+                     'Only single source is allowed for "raw_config" ' "source_type"

+                 )

  

              source_name_hash = source[0].split("#")

-             if (len(source_name_hash) != 2 or not source_name_hash[0] or

-                     not source_name_hash[1]):

+             if (

+                 len(source_name_hash) != 2

+                 or not source_name_hash[0]

+                 or not source_name_hash[1]

+             ):

                  raise ValueError(

                      'Source must be in "source_name#commit_hash" format for '

-                     '"raw_config" source_type.')

+                     '"raw_config" source_type.'

+                 )

  

              source_name, source_hash = source_name_hash

              if source_name not in conf.raw_config_urls:

                  raise ValueError(

-                     'Source "%s" does not exist in server configuration.' %

-                     source_name)

+                     'Source "%s" does not exist in server configuration.' % source_name

+                 )

              raw_config_key = source_name

          elif source_type == PungiSourceType.MODULE:

              for module_str in source:
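
One quirk worth noting in the hunk above: black never merges adjacent string literals, so a pre-existing implicit concatenation survives reformatting with mixed quote styles (single quotes are kept where the text itself contains double quotes). A minimal runnable illustration:

```python
# The parser concatenates the two literals; black only normalizes quotes,
# keeping single quotes on the first literal to avoid escaping.
msg = 'Only single source is allowed for "raw_config" ' "source_type"
assert msg == 'Only single source is allowed for "raw_config" source_type'
```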
@@ -354,29 +356,31 @@ 

                  if len(nsvc) < 2:

                      raise ValueError(

                          'Module definition must be in "n:s", "n:s:v" or '

-                         '"n:s:v:c" format, but got %s' % module_str)

+                         '"n:s:v:c" format, but got %s' % module_str

+                     )

                  if nsvc[0] in conf.base_module_names:

                      raise ValueError(

                          "ODCS currently cannot create compose with base "

-                         "modules, but %s was requested." % nsvc[0])

+                         "modules, but %s was requested." % nsvc[0]

+                     )

  

-         source = ' '.join(source)

+         source = " ".join(source)

  

          packages = None

          if "packages" in source_data:

-             packages = ' '.join(source_data["packages"])

+             packages = " ".join(source_data["packages"])

  

          builds = None

          if "builds" in source_data:

-             builds = ' '.join(source_data["builds"])

+             builds = " ".join(source_data["builds"])

  

          sigkeys = ""

          if "sigkeys" in source_data:

-             sigkeys = ' '.join(source_data["sigkeys"])

+             sigkeys = " ".join(source_data["sigkeys"])

          else:

-             sigkeys = ' '.join(conf.sigkeys)

+             sigkeys = " ".join(conf.sigkeys)

  

-         koji_event = source_data.get('koji_event', None)

+         koji_event = source_data.get("koji_event", None)

  

          flags = 0

          if "flags" in data:
@@ -394,7 +398,7 @@ 

  

          arches = None

          if "arches" in data:

-             arches = ' '.join(data["arches"])

+             arches = " ".join(data["arches"])

          else:

              arches = " ".join(conf.arches)

  
@@ -410,12 +414,12 @@ 

          if "multilib_method" in data:

              for name in data["multilib_method"]:

                  if name not in MULTILIB_METHODS:

-                     raise ValueError("Unknown multilib method \"%s\"" % name)

+                     raise ValueError('Unknown multilib method "%s"' % name)

                  multilib_method |= MULTILIB_METHODS[name]

  

          modular_koji_tags = None

          if "modular_koji_tags" in source_data:

-             modular_koji_tags = ' '.join(source_data["modular_koji_tags"])

+             modular_koji_tags = " ".join(source_data["modular_koji_tags"])

  

          module_defaults_url = None

          if "module_defaults_url" in source_data:
@@ -430,7 +434,8 @@ 

          if bool(module_defaults_url) ^ bool(module_defaults_commit):

              raise ValueError(

                  'The "module_defaults_url" and "module_defaults_commit" '

-                 'must be used together.')

+                 "must be used together."

+             )

          elif module_defaults_url and module_defaults_commit:

              module_defaults = "%s %s" % (module_defaults_url, module_defaults_commit)

  
@@ -446,14 +451,28 @@ 

              target_dir = conf.target_dir

  

          raise_if_input_not_allowed(

-             source_types=source_type, sources=source, results=results,

-             flags=flags, arches=arches, compose_types=compose_type,

-             target_dirs=target_dir, raw_config_keys=raw_config_key)

+             source_types=source_type,

+             sources=source,

+             results=results,

+             flags=flags,

+             arches=arches,

+             compose_types=compose_type,

+             target_dirs=target_dir,

+             raw_config_keys=raw_config_key,

+         )

  

          compose = Compose.create(

-             db.session, self._get_compose_owner(), source_type, source,

-             results, seconds_to_live,

-             packages, flags, sigkeys, koji_event, arches,

+             db.session,

+             self._get_compose_owner(),

+             source_type,

+             source,

+             results,

+             seconds_to_live,

+             packages,

+             flags,

+             sigkeys,

+             koji_event,

+             arches,

              multilib_arches=multilib_arches,

              multilib_method=multilib_method,

              builds=builds,
@@ -462,7 +481,8 @@ 

              module_defaults_url=module_defaults,

              label=label,

              compose_type=compose_type,

-             target_dir=target_dir)

+             target_dir=target_dir,

+         )

          db.session.add(compose)

          # Flush is needed, because we use `before_commit` SQLAlchemy event to

          # send message and before_commit can be called before flush and
@@ -476,7 +496,7 @@ 

          return jsonify(compose.json()), 200

  

      @login_required

-     @require_scopes('delete-compose')

+     @require_scopes("delete-compose")

      def delete(self, id):

          """Cancels waiting compose or marks finished compose as expired to be

          removed later from ODCS storage. The compose metadata are still stored
@@ -496,7 +516,7 @@ 

          """

          compose = Compose.query.filter_by(id=id).first()

          if not compose:

-             raise NotFound('No such compose found.')

+             raise NotFound("No such compose found.")

  

          is_admin = has_role("admins")

  
@@ -525,10 +545,12 @@ 

              raise Forbidden("User %s is not in role admins." % g.user.username)

  

          # can remove compose that is in state of 'done' or 'failed'

-         deletable_states = {n: COMPOSE_STATES[n] for n in ['done', 'failed']}

+         deletable_states = {n: COMPOSE_STATES[n] for n in ["done", "failed"]}

          if compose.state not in deletable_states.values():

-             raise BadRequest('Compose (id=%s) cannot be removed; its state needs to be in %s.' %

-                              (id, deletable_states.keys()))

+             raise BadRequest(

+                 "Compose (id=%s) can not be removed, its state need to be in %s."

+                 % (id, deletable_states.keys())

+             )

  

          # change compose.time_to_expire to now, so backend will

          # delete this compose as it's an expired compose now
@@ -536,10 +558,11 @@ 

          compose.removed_by = g.user.username

          db.session.add(compose)

          db.session.commit()

-         message = ("The delete request for compose (id=%s) has been accepted and will be"

-                    " processed by backend later." % compose.id)

-         response = jsonify({'status': 202,

-                             'message': message})

+         message = (

+             "The delete request for compose (id=%s) has been accepted and will be"

+             " processed by backend later." % compose.id

+         )

+         response = jsonify({"status": 202, "message": message})

          response.status_code = 202

          return response

  
@@ -564,25 +587,24 @@ 

          :resjson list sigkeys: Default list of sigkeys.

          :statuscode 200: Compose updated and returned.

          """

-         json = {'version': version}

-         config_items = ['auth_backend', 'allowed_clients', 'raw_config_urls', 'sigkeys']

+         json = {"version": version}

+         config_items = ["auth_backend", "allowed_clients", "raw_config_urls", "sigkeys"]

          for item in config_items:

              config_item = getattr(conf, item)

              # All config items have a default, so if one doesn't exist it is

              # an error

              if config_item is None:

-                 raise ValueError(

-                     'An invalid config item of "%s" was specified' % item)

+                 raise ValueError('An invalid config item of "%s" was specified' % item)

              json[item] = config_item

          return jsonify(json), 200

  

  

  class Index(View):

  

-     methods = ['GET']

+     methods = ["GET"]

  

      def dispatch_request(self):

-         return render_template('index.html')

+         return render_template("index.html")

  

  

  class MetricsAPI(MethodView):
@@ -597,25 +619,22 @@ 

  

  def register_api_v1():

      """ Registers version 1 of ODCS API. """

-     composes_view = ODCSAPI.as_view('composes')

-     about_view = AboutAPI.as_view('about')

-     metrics_view = MetricsAPI.as_view('metrics')

+     composes_view = ODCSAPI.as_view("composes")

+     about_view = AboutAPI.as_view("about")

+     metrics_view = MetricsAPI.as_view("metrics")

      for key, val in api_v1.items():

          if key.startswith("compose"):

-             app.add_url_rule(val['url'],

-                              endpoint=key,

-                              view_func=composes_view,

-                              **val['options'])

+             app.add_url_rule(

+                 val["url"], endpoint=key, view_func=composes_view, **val["options"]

+             )

          elif key.startswith("about"):

-             app.add_url_rule(val['url'],

-                              endpoint=key,

-                              view_func=about_view,

-                              **val['options'])

+             app.add_url_rule(

+                 val["url"], endpoint=key, view_func=about_view, **val["options"]

+             )

          elif key.startswith("metrics"):

-             app.add_url_rule(val['url'],

-                              endpoint=key,

-                              view_func=metrics_view,

-                              **val['options'])

+             app.add_url_rule(

+                 val["url"], endpoint=key, view_func=metrics_view, **val["options"]

+             )

          else:

              raise ValueError("Unhandled API key: %s." % key)

  

file modified
+42 -58
@@ -29,7 +29,8 @@ 

  from odcs.server import conf

  

  import gi

- gi.require_version('Modulemd', '2.0')

+ 

+ gi.require_version("Modulemd", "2.0")

  from gi.repository import Modulemd  # noqa: E402

  

  
@@ -39,14 +40,13 @@ 

      return mod_index.dump_to_string()

  

  

- def make_module(name, stream, version, requires={}, mdversion=1,

-                 context=None, state=5):

+ def make_module(name, stream, version, requires={}, mdversion=1, context=None, state=5):

      if mdversion == 1:

          mmd = Modulemd.ModuleStreamV1.new(name, stream)

      else:

          mmd = Modulemd.ModuleStreamV2.new(name, stream)

      mmd.set_version(version)

-     mmd.set_context(context or '00000000')

+     mmd.set_context(context or "00000000")

      mmd.set_summary("foo")

      mmd.set_description("foo")

      mmd.add_module_license("GPL")
@@ -61,69 +61,49 @@ 

          mmd.add_dependencies(deps)

  

      return {

-         'name': name,

-         'stream': stream,

-         'version': str(version),

-         'context': context or '00000000',

-         'modulemd': dump_mmd(mmd),

-         'state': state,

+         "name": name,

+         "stream": stream,

+         "version": str(version),

+         "context": context or "00000000",

+         "modulemd": dump_mmd(mmd),

+         "state": state,

      }

  

  

  TEST_MBS_MODULES_MMDv1 = [

      # test_backend.py

-     make_module('moduleA', 'f26', 20170809000000,

-                 {'moduleB': 'f26'}),

-     make_module('moduleA', 'f26', 20170805000000,

-                 {'moduleB': 'f26'}),

- 

-     make_module('moduleB', 'f26', 20170808000000,

-                 {'moduleC': 'f26', 'moduleD': 'f26'}),

-     make_module('moduleB', 'f27', 2017081000000,

-                 {'moduleC': 'f27'}),

- 

-     make_module('moduleC', 'f26', 20170807000000,

-                 {'moduleD': 'f26'}),

- 

-     make_module('moduleD', 'f26', 20170806000000),

- 

+     make_module("moduleA", "f26", 20170809000000, {"moduleB": "f26"}),

+     make_module("moduleA", "f26", 20170805000000, {"moduleB": "f26"}),

+     make_module("moduleB", "f26", 20170808000000, {"moduleC": "f26", "moduleD": "f26"}),

+     make_module("moduleB", "f27", 2017081000000, {"moduleC": "f27"}),

+     make_module("moduleC", "f26", 20170807000000, {"moduleD": "f26"}),

+     make_module("moduleD", "f26", 20170806000000),

      # test_composerthread.py

-     make_module('testmodule', 'master', 20170515074418),

-     make_module('testmodule', 'master', 20170515074419)

+     make_module("testmodule", "master", 20170515074418),

+     make_module("testmodule", "master", 20170515074419),

  ]

  

  

  TEST_MBS_MODULES_MMDv2 = [

      # test_backend.py

-     make_module('moduleA', 'f26', 20170809000000,

-                 {'moduleB': 'f26'}, 2),

-     make_module('moduleA', 'f26', 20170805000000,

-                 {'moduleB': 'f26'}, 2),

- 

-     make_module('moduleB', 'f26', 20170808000000,

-                 {'moduleC': 'f26', 'moduleD': 'f26'}, 2),

-     make_module('moduleB', 'f27', 2017081000000,

-                 {'moduleC': 'f27'}, 2),

- 

-     make_module('moduleC', 'f26', 20170807000000,

-                 {'moduleD': 'f26'}, 2),

- 

-     make_module('moduleD', 'f26', 20170806000000, {}, 2),

- 

+     make_module("moduleA", "f26", 20170809000000, {"moduleB": "f26"}, 2),

+     make_module("moduleA", "f26", 20170805000000, {"moduleB": "f26"}, 2),

+     make_module(

+         "moduleB", "f26", 20170808000000, {"moduleC": "f26", "moduleD": "f26"}, 2

+     ),

+     make_module("moduleB", "f27", 2017081000000, {"moduleC": "f27"}, 2),

+     make_module("moduleC", "f26", 20170807000000, {"moduleD": "f26"}, 2),

+     make_module("moduleD", "f26", 20170806000000, {}, 2),

      # module builds in "done" state.

-     make_module('testmodule', 'master', 20180515074419, {}, 2, state=3),

- 

+     make_module("testmodule", "master", 20180515074419, {}, 2, state=3),

      # test_composerthread.py

-     make_module('testmodule', 'master', 20170515074418, {}, 2),

-     make_module('testmodule', 'master', 20170515074419, {}, 2),

- 

+     make_module("testmodule", "master", 20170515074418, {}, 2),

+     make_module("testmodule", "master", 20170515074419, {}, 2),

      # multiple contexts

-     make_module('parent', 'master', 1, {}, 2, context="a"),

-     make_module('parent', 'master', 1, {}, 2, context="b"),

-     make_module('testcontexts', 'master', 1, {"parent": "master"},

-                 2, context="a"),

-     make_module('testcontexts', 'master', 1, {"parent": "master"},

-                 2, context="b"),

+     make_module("parent", "master", 1, {}, 2, context="a"),

+     make_module("parent", "master", 1, {}, 2, context="b"),

+     make_module("testcontexts", "master", 1, {"parent": "master"}, 2, context="a"),

+     make_module("testcontexts", "master", 1, {"parent": "master"}, 2, context="b"),

  ]

  

  
@@ -133,13 +113,14 @@ 

      up modules are redirected to return results from the TEST_MODULES array

      above.

      """

+ 

      def wrapper(f):

          @wraps(f)

          def wrapped(*args, **kwargs):

              def handle_module_builds(request):

                  query = parse_qs(urlparse(request.url).query)

-                 states = [int(s) for s in query['state']]

-                 nsvc = query['nsvc'][0]

+                 states = [int(s) for s in query["state"]]

+                 nsvc = query["nsvc"][0]

                  nsvc_parts = nsvc.split(":")

                  nsvc_keys = ["name", "stream", "version", "context"]

                  nsvc_dict = {}
@@ -168,11 +149,14 @@ 

                  return (200, {}, json.dumps(body))

  

              responses.add_callback(

-                 responses.GET, conf.mbs_url + '/1/module-builds/',

-                 content_type='application/json',

-                 callback=handle_module_builds)

+                 responses.GET,

+                 conf.mbs_url + "/1/module-builds/",

+                 content_type="application/json",

+                 callback=handle_module_builds,

+             )

  

              return f(*args, **kwargs)

  

          return responses.activate(wrapped)

+ 

      return wrapper
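
The decorator above builds on the callback API of the ``responses`` library.
That pattern on its own, as a self-contained sketch (hypothetical URL and
payload):

    import json

    import requests
    import responses


    @responses.activate
    def demo():
        def handle(request):
            # responses hands over the prepared request and expects a
            # (status, headers, body) tuple back.
            return (200, {}, json.dumps({"items": [], "meta": {"total": 0}}))

        responses.add_callback(
            responses.GET,
            "https://mbs.example.com/1/module-builds/",  # hypothetical URL
            content_type="application/json",
            callback=handle,
        )
        print(requests.get("https://mbs.example.com/1/module-builds/").json())


    demo()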

file modified
+144 -133
@@ -44,25 +44,25 @@ 

  

  

  class TestLoadSSLUserFromRequest(ModelsBaseTest):

- 

      def setUp(self):

          super(TestLoadSSLUserFromRequest, self).setUp()

  

-         self.user = User(username='CN=tester1,L=prod,DC=example,DC=com')

+         self.user = User(username="CN=tester1,L=prod,DC=example,DC=com")

          db.session.add(self.user)

          db.session.commit()

  

      def test_create_new_user(self):

          environ_base = {

-             'SSL_CLIENT_VERIFY': 'SUCCESS',

-             'SSL_CLIENT_S_DN': 'CN=client,L=prod,DC=example,DC=com',

+             "SSL_CLIENT_VERIFY": "SUCCESS",

+             "SSL_CLIENT_S_DN": "CN=client,L=prod,DC=example,DC=com",

          }

  

          with app.test_request_context(environ_base=environ_base):

              load_ssl_user_from_request(flask.request)

  

              expected_user = db.session.query(User).filter(

-                 User.username == 'CN=client,L=prod,DC=example,DC=com')[0]

+                 User.username == "CN=client,L=prod,DC=example,DC=com"

+             )[0]

  

              self.assertEqual(expected_user.id, flask.g.user.id)

              self.assertEqual(expected_user.username, flask.g.user.username)
@@ -72,8 +72,8 @@ 

  

      def test_return_existing_user(self):

          environ_base = {

-             'SSL_CLIENT_VERIFY': 'SUCCESS',

-             'SSL_CLIENT_S_DN': self.user.username,

+             "SSL_CLIENT_VERIFY": "SUCCESS",

+             "SSL_CLIENT_S_DN": self.user.username,

          }

  

          with app.test_request_context(environ_base=environ_base):
@@ -87,40 +87,39 @@ 

  

      def test_401_if_ssl_client_verify_not_success(self):

          environ_base = {

-             'SSL_CLIENT_VERIFY': 'GENEROUS',

-             'SSL_CLIENT_S_DN': self.user.username,

+             "SSL_CLIENT_VERIFY": "GENEROUS",

+             "SSL_CLIENT_S_DN": self.user.username,

          }

  

          with app.test_request_context(environ_base=environ_base):

              with self.assertRaises(Unauthorized) as ctx:

                  load_ssl_user_from_request(flask.request)

-             self.assertIn('Cannot verify client: GENEROUS',

-                           ctx.exception.description)

+             self.assertIn("Cannot verify client: GENEROUS", ctx.exception.description)

  

      def test_401_if_cn_not_set(self):

          environ_base = {

-             'SSL_CLIENT_VERIFY': 'SUCCESS',

+             "SSL_CLIENT_VERIFY": "SUCCESS",

          }

  

          with app.test_request_context(environ_base=environ_base):

              with self.assertRaises(Unauthorized) as ctx:

                  load_ssl_user_from_request(flask.request)

-             self.assertIn('Unable to get user information (DN) from client certificate',

-                           ctx.exception.description)

+             self.assertIn(

+                 "Unable to get user information (DN) from client certificate",

+                 ctx.exception.description,

+             )

  

  

  class TestLoadKrbOrSSLUserFromRequest(unittest.TestCase):

- 

      @patch("odcs.server.auth.load_ssl_user_from_request")

      @patch("odcs.server.auth.load_krb_user_from_request")

      def test_load_krb_or_ssl_user_from_request_remote_user(

-             self, load_krb_user, load_ssl_user):

+         self, load_krb_user, load_ssl_user

+     ):

          load_krb_user.return_value = "krb_user"

          load_ssl_user.return_value = "ssl_user"

  

-         environ_base = {

-             'REMOTE_USER': 'newuser@EXAMPLE.COM'

-         }

+         environ_base = {"REMOTE_USER": "newuser@EXAMPLE.COM"}

  

          with app.test_request_context(environ_base=environ_base):

              user = load_krb_or_ssl_user_from_request(flask.request)
@@ -129,13 +128,14 @@ 

      @patch("odcs.server.auth.load_ssl_user_from_request")

      @patch("odcs.server.auth.load_krb_user_from_request")

      def test_load_krb_or_ssl_user_from_request_ssl_client(

-             self, load_krb_user, load_ssl_user):

+         self, load_krb_user, load_ssl_user

+     ):

          load_krb_user.return_value = "krb_user"

          load_ssl_user.return_value = "ssl_user"

  

          environ_base = {

-             'SSL_CLIENT_VERIFY': 'SUCCESS',

-             'SSL_CLIENT_S_DN': 'ssl_user',

+             "SSL_CLIENT_VERIFY": "SUCCESS",

+             "SSL_CLIENT_S_DN": "ssl_user",

          }

  

          with app.test_request_context(environ_base=environ_base):
@@ -144,43 +144,37 @@ 

  

  

  class TestLoadKrbUserFromRequest(ModelsBaseTest):

- 

      def setUp(self):

          super(TestLoadKrbUserFromRequest, self).setUp()

  

-         self.user = User(username='tester1')

+         self.user = User(username="tester1")

          db.session.add(self.user)

          db.session.commit()

  

-     @patch('odcs.server.auth.query_ldap_groups')

+     @patch("odcs.server.auth.query_ldap_groups")

      def test_create_new_user(self, query_ldap_groups):

-         query_ldap_groups.return_value = ['devel', 'admins']

+         query_ldap_groups.return_value = ["devel", "admins"]

  

-         environ_base = {

-             'REMOTE_USER': 'newuser@EXAMPLE.COM'

-         }

+         environ_base = {"REMOTE_USER": "newuser@EXAMPLE.COM"}

  

          with app.test_request_context(environ_base=environ_base):

              load_krb_user_from_request(flask.request)

  

-             expected_user = db.session.query(User).filter(

-                 User.username == 'newuser')[0]

+             expected_user = db.session.query(User).filter(User.username == "newuser")[0]

  

              self.assertEqual(expected_user.id, flask.g.user.id)

              self.assertEqual(expected_user.username, flask.g.user.username)

  

              # Ensure user's groups are created

              self.assertEqual(2, len(flask.g.groups))

-             self.assertEqual(['admins', 'devel'], sorted(flask.g.groups))

+             self.assertEqual(["admins", "devel"], sorted(flask.g.groups))

  

-     @patch('odcs.server.auth.query_ldap_groups')

+     @patch("odcs.server.auth.query_ldap_groups")

      def test_return_existing_user(self, query_ldap_groups):

-         query_ldap_groups.return_value = ['devel', 'admins']

+         query_ldap_groups.return_value = ["devel", "admins"]

          original_users_count = db.session.query(User.id).count()

  

-         environ_base = {

-             'REMOTE_USER': '{0}@EXAMPLE.COM'.format(self.user.username)

-         }

+         environ_base = {"REMOTE_USER": "{0}@EXAMPLE.COM".format(self.user.username)}

  

          with app.test_request_context(environ_base=environ_base):

              load_krb_user_from_request(flask.request)
@@ -188,68 +182,68 @@ 

              self.assertEqual(original_users_count, db.session.query(User.id).count())

              self.assertEqual(self.user.id, flask.g.user.id)

              self.assertEqual(self.user.username, flask.g.user.username)

-             self.assertEqual(['admins', 'devel'], sorted(flask.g.groups))

+             self.assertEqual(["admins", "devel"], sorted(flask.g.groups))

  

      def test_401_if_remote_user_not_present(self):

          with app.test_request_context():

              with self.assertRaises(Unauthorized) as ctx:

                  load_krb_user_from_request(flask.request)

-             self.assertIn('REMOTE_USER is not present in request.',

-                           ctx.exception.description)

+             self.assertIn(

+                 "REMOTE_USER is not present in request.", ctx.exception.description

+             )

  

  

  class TestLoadOpenIDCUserFromRequest(ModelsBaseTest):

- 

      def setUp(self):

          super(TestLoadOpenIDCUserFromRequest, self).setUp()

  

-         self.user = User(username='tester1')

+         self.user = User(username="tester1")

          db.session.add(self.user)

          db.session.commit()

  

-     @patch('odcs.server.auth.requests.get')

+     @patch("odcs.server.auth.requests.get")

      def test_create_new_user(self, get):

          get.return_value.status_code = 200

          get.return_value.json.return_value = {

-             'groups': ['tester', 'admin'],

-             'name': 'new_user',

+             "groups": ["tester", "admin"],

+             "name": "new_user",

          }

  

          environ_base = {

-             'REMOTE_USER': 'new_user',

-             'OIDC_access_token': '39283',

-             'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/',

-             'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups '

-                                 'https://pagure.io/odcs/new-compose '

-                                 'https://pagure.io/odcs/renew-compose '

-                                 'https://pagure.io/odcs/delete-compose',

+             "REMOTE_USER": "new_user",

+             "OIDC_access_token": "39283",

+             "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/",

+             "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups "

+             "https://pagure.io/odcs/new-compose "

+             "https://pagure.io/odcs/renew-compose "

+             "https://pagure.io/odcs/delete-compose",

          }

  

          with app.test_request_context(environ_base=environ_base):

              load_openidc_user(flask.request)

  

-             new_user = db.session.query(User).filter(User.username == 'new_user')[0]

+             new_user = db.session.query(User).filter(User.username == "new_user")[0]

  

              self.assertEqual(new_user, flask.g.user)

-             self.assertEqual('new_user', flask.g.user.username)

-             self.assertEqual(sorted(['admin', 'tester']), sorted(flask.g.groups))

+             self.assertEqual("new_user", flask.g.user.username)

+             self.assertEqual(sorted(["admin", "tester"]), sorted(flask.g.groups))

  

-     @patch('odcs.server.auth.requests.get')

+     @patch("odcs.server.auth.requests.get")

      def test_return_existing_user(self, get):

          get.return_value.status_code = 200

          get.return_value.json.return_value = {

-             'groups': ['testers', 'admins'],

-             'name': self.user.username,

+             "groups": ["testers", "admins"],

+             "name": self.user.username,

          }

  

          environ_base = {

-             'REMOTE_USER': self.user.username,

-             'OIDC_access_token': '39283',

-             'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/',

-             'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups '

-                                 'https://pagure.io/odcs/new-compose '

-                                 'https://pagure.io/odcs/renew-compose '

-                                 'https://pagure.io/odcs/delete-compose',

+             "REMOTE_USER": self.user.username,

+             "OIDC_access_token": "39283",

+             "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/",

+             "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups "

+             "https://pagure.io/odcs/new-compose "

+             "https://pagure.io/odcs/renew-compose "

+             "https://pagure.io/odcs/delete-compose",

          }

  

          with app.test_request_context(environ_base=environ_base):
@@ -262,22 +256,22 @@ 

  

              # Ensure existing user is set in g

              self.assertEqual(self.user.id, flask.g.user.id)

-             self.assertEqual(['admins', 'testers'], sorted(flask.g.groups))

+             self.assertEqual(["admins", "testers"], sorted(flask.g.groups))

  

-     @patch('odcs.server.auth.requests.get')

+     @patch("odcs.server.auth.requests.get")

      def test_user_info_failure(self, get):

          # If the user_info endpoint errors out, we continue to authenticate

          # based only on the user (which we have from the token), ignoring groups.

          get.return_value.status_code = 400

  

          environ_base = {

-             'REMOTE_USER': self.user.username,

-             'OIDC_access_token': '39283',

-             'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/',

-             'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups '

-                                 'https://pagure.io/odcs/new-compose '

-                                 'https://pagure.io/odcs/renew-compose '

-                                 'https://pagure.io/odcs/delete-compose',

+             "REMOTE_USER": self.user.username,

+             "OIDC_access_token": "39283",

+             "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/",

+             "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups "

+             "https://pagure.io/odcs/new-compose "

+             "https://pagure.io/odcs/renew-compose "

+             "https://pagure.io/odcs/delete-compose",

          }

  

          with app.test_request_context(environ_base=environ_base):
@@ -289,28 +283,28 @@ 

      def test_401_if_remote_user_not_present(self):

          environ_base = {

              # Missing REMOTE_USER here

-             'OIDC_access_token': '39283',

-             'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/',

-             'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups',

+             "OIDC_access_token": "39283",

+             "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/",

+             "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups",

          }

          with app.test_request_context(environ_base=environ_base):

              self.assertRaises(Unauthorized, load_openidc_user, flask.request)

  

      def test_401_if_access_token_not_present(self):

          environ_base = {

-             'REMOTE_USER': 'tester1',

+             "REMOTE_USER": "tester1",

              # Missing OIDC_access_token here

-             'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/',

-             'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups',

+             "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/",

+             "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups",

          }

          with app.test_request_context(environ_base=environ_base):

              self.assertRaises(Unauthorized, load_openidc_user, flask.request)

  

      def test_401_if_scope_not_present(self):

          environ_base = {

-             'REMOTE_USER': 'tester1',

-             'OIDC_access_token': '39283',

-             'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/',

+             "REMOTE_USER": "tester1",

+             "OIDC_access_token": "39283",

+             "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/",

              # Missing OIDC_CLAIM_scope here

          }

          with app.test_request_context(environ_base=environ_base):
@@ -318,37 +312,46 @@ 

  

      def test_401_if_required_scope_not_present_in_token_scope(self):

          environ_base = {

-             'REMOTE_USER': 'new_user',

-             'OIDC_access_token': '39283',

-             'OIDC_CLAIM_iss': 'https://iddev.fedorainfracloud.org/openidc/',

-             'OIDC_CLAIM_scope': 'openid https://id.fedoraproject.org/scope/groups',

+             "REMOTE_USER": "new_user",

+             "OIDC_access_token": "39283",

+             "OIDC_CLAIM_iss": "https://iddev.fedorainfracloud.org/openidc/",

+             "OIDC_CLAIM_scope": "openid https://id.fedoraproject.org/scope/groups",

          }

  

-         with patch.object(odcs.server.auth.conf, 'auth_openidc_required_scopes', ['new-compose']):

+         with patch.object(

+             odcs.server.auth.conf, "auth_openidc_required_scopes", ["new-compose"]

+         ):

              with app.test_request_context(environ_base=environ_base):

                  with self.assertRaises(Unauthorized) as ctx:

                      load_openidc_user(flask.request)

-                 self.assertIn('Required OIDC scope new-compose not present.',

-                               ctx.exception.description)

+                 self.assertIn(

+                     "Required OIDC scope new-compose not present.",

+                     ctx.exception.description,

+                 )

  

  

  class TestQueryLdapGroups(unittest.TestCase):

      """Test auth.query_ldap_groups"""

  

-     @patch('odcs.server.auth.ldap.initialize')

+     @patch("odcs.server.auth.ldap.initialize")

      def test_get_groups(self, initialize):

          initialize.return_value.search_s.return_value = [

-             ('cn=odcsdev,ou=Groups,dc=example,dc=com',

-              {'gidNumber': ['5523'], 'cn': ['odcsdev']}),

-             ('cn=freshmakerdev,ou=Groups,dc=example,dc=com',

-              {'gidNumber': ['17861'], 'cn': ['freshmakerdev']}),

-             ('cn=devel,ou=Groups,dc=example,dc=com',

-              {'gidNumber': ['5781'], 'cn': ['devel']})

+             (

+                 "cn=odcsdev,ou=Groups,dc=example,dc=com",

+                 {"gidNumber": ["5523"], "cn": ["odcsdev"]},

+             ),

+             (

+                 "cn=freshmakerdev,ou=Groups,dc=example,dc=com",

+                 {"gidNumber": ["17861"], "cn": ["freshmakerdev"]},

+             ),

+             (

+                 "cn=devel,ou=Groups,dc=example,dc=com",

+                 {"gidNumber": ["5781"], "cn": ["devel"]},

+             ),

          ]

  

-         groups = query_ldap_groups('me')

-         self.assertEqual(sorted(['odcsdev', 'freshmakerdev', 'devel']),

-                          sorted(groups))

+         groups = query_ldap_groups("me")

+         self.assertEqual(sorted(["odcsdev", "freshmakerdev", "devel"]), sorted(groups))

  

  

  class TestInitAuth(unittest.TestCase):
@@ -358,78 +361,86 @@ 

          self.login_manager = Mock()

  

      def test_select_kerberos_auth_backend(self):

-         init_auth(self.login_manager, 'kerberos')

-         self.login_manager.request_loader.assert_called_once_with(load_krb_user_from_request)

+         init_auth(self.login_manager, "kerberos")

+         self.login_manager.request_loader.assert_called_once_with(

+             load_krb_user_from_request

+         )

  

      def test_select_openidc_auth_backend(self):

-         init_auth(self.login_manager, 'openidc')

+         init_auth(self.login_manager, "openidc")

          self.login_manager.request_loader.assert_called_once_with(load_openidc_user)

  

      def test_select_ssl_auth_backend(self):

-         init_auth(self.login_manager, 'ssl')

-         self.login_manager.request_loader.assert_called_once_with(load_ssl_user_from_request)

+         init_auth(self.login_manager, "ssl")

+         self.login_manager.request_loader.assert_called_once_with(

+             load_ssl_user_from_request

+         )

  

      def test_select_kerberos_or_ssl_auth_backend(self):

-         init_auth(self.login_manager, 'kerberos_or_ssl')

-         self.login_manager.request_loader.assert_called_once_with(load_krb_or_ssl_user_from_request)

+         init_auth(self.login_manager, "kerberos_or_ssl")

+         self.login_manager.request_loader.assert_called_once_with(

+             load_krb_or_ssl_user_from_request

+         )

  

      def test_not_use_auth_backend(self):

-         init_auth(self.login_manager, 'noauth')

+         init_auth(self.login_manager, "noauth")

          self.login_manager.request_loader.assert_not_called()

  

      def test_error_if_select_an_unknown_backend(self):

-         self.assertRaises(ValueError, init_auth, self.login_manager, 'xxx')

-         self.assertRaises(ValueError, init_auth, self.login_manager, '')

+         self.assertRaises(ValueError, init_auth, self.login_manager, "xxx")

+         self.assertRaises(ValueError, init_auth, self.login_manager, "")

          self.assertRaises(ValueError, init_auth, self.login_manager, None)

  

      def test_init_auth_no_ldap_server(self):

-         with patch.object(odcs.server.auth.conf, 'auth_ldap_server', ''):

-             self.assertRaises(ValueError, init_auth, self.login_manager,

-                               'kerberos')

+         with patch.object(odcs.server.auth.conf, "auth_ldap_server", ""):

+             self.assertRaises(ValueError, init_auth, self.login_manager, "kerberos")

  

      def test_init_auths_no_ldap_group_base(self):

-         with patch.object(odcs.server.auth.conf, 'auth_ldap_group_base', ''):

-             self.assertRaises(ValueError, init_auth, self.login_manager,

-                               'kerberos')

+         with patch.object(odcs.server.auth.conf, "auth_ldap_group_base", ""):

+             self.assertRaises(ValueError, init_auth, self.login_manager, "kerberos")

  

  

  class TestDecoratorRequireScopes(unittest.TestCase):

      """Test decorator require_scopes"""

  

-     @patch.object(conf, 'oidc_base_namespace', new='http://example.com/')

-     @patch.object(conf, 'auth_backend', new='openidc')

+     @patch.object(conf, "oidc_base_namespace", new="http://example.com/")

+     @patch.object(conf, "auth_backend", new="openidc")

      def test_function_is_called(self):

          with app.test_request_context():

-             flask.g.oidc_scopes = ['http://example.com/renew-compose']

+             flask.g.oidc_scopes = ["http://example.com/renew-compose"]

  

              mock_func = Mock()

-             mock_func.__name__ = 'real_function'

-             decorated_func = require_scopes('renew-compose')(mock_func)

+             mock_func.__name__ = "real_function"

+             decorated_func = require_scopes("renew-compose")(mock_func)

              decorated_func(1, 2, 3)

  

          mock_func.assert_called_once_with(1, 2, 3)

  

-     @patch.object(conf, 'oidc_base_namespace', new='http://example.com/')

-     @patch.object(conf, 'auth_backend', new='openidc')

+     @patch.object(conf, "oidc_base_namespace", new="http://example.com/")

+     @patch.object(conf, "auth_backend", new="openidc")

      def test_function_is_not_called_if_scope_is_not_present(self):

          with app.test_request_context():

-             flask.g.oidc_scopes = ['http://example.com/new-compose',

-                                    'http://example.com/renew-compose']

+             flask.g.oidc_scopes = [

+                 "http://example.com/new-compose",

+                 "http://example.com/renew-compose",

+             ]

  

              mock_func = Mock()

-             mock_func.__name__ = 'real_function'

-             decorated_func = require_scopes('delete-compose')(mock_func)

+             mock_func.__name__ = "real_function"

+             decorated_func = require_scopes("delete-compose")(mock_func)

              self.assertRaises(Forbidden, decorated_func, 1, 2, 3)

  

-     @patch.object(conf, 'oidc_base_namespace', new='http://example.com/')

-     @patch.object(conf, 'auth_backend', new='kerberos')

+     @patch.object(conf, "oidc_base_namespace", new="http://example.com/")

+     @patch.object(conf, "auth_backend", new="kerberos")

      def test_function_is_called_for_non_openidc_backend(self):

          with app.test_request_context():

-             flask.g.oidc_scopes = ['http://example.com/new-compose',

-                                    'http://example.com/renew-compose']

+             flask.g.oidc_scopes = [

+                 "http://example.com/new-compose",

+                 "http://example.com/renew-compose",

+             ]

  

              mock_func = Mock()

-             mock_func.__name__ = 'real_function'

-             decorated_func = require_scopes('delete-compose')(mock_func)

+             mock_func.__name__ = "real_function"

+             decorated_func = require_scopes("delete-compose")(mock_func)

              decorated_func(1, 2, 3)

              mock_func.assert_called_once_with(1, 2, 3)
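
Taken together, these tests pin down the decorator's contract: with the
``openidc`` backend the namespaced scope must be among the granted scopes, and
any other backend passes straight through. A simplified sketch of that
contract (not the real odcs.server.auth implementation, which reads conf and
flask.g itself):

    from functools import wraps

    from werkzeug.exceptions import Forbidden


    def require_scope_sketch(scope, backend, base_namespace, granted):
        def decorator(func):
            @wraps(func)
            def wrapper(*args, **kwargs):
                # Only the OpenID Connect backend enforces scopes.
                if backend == "openidc" and base_namespace + scope not in granted:
                    raise Forbidden("Required OIDC scope %s not present." % scope)
                return func(*args, **kwargs)

            return wrapper

        return decorator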

file modified
+610 -276
@@ -32,10 +32,15 @@ 

  from odcs.common.types import COMPOSE_FLAGS, COMPOSE_RESULTS, COMPOSE_STATES

  from odcs.server.mbs import ModuleLookupError

  from odcs.server.pungi import PungiSourceType

- from odcs.server.backend import (resolve_compose, get_reusable_compose,

-                                  generate_compose, generate_pulp_compose,

-                                  generate_pungi_compose, validate_pungi_compose,

-                                  koji_get_inherited_tags)

+ from odcs.server.backend import (

+     resolve_compose,

+     get_reusable_compose,

+     generate_compose,

+     generate_pulp_compose,

+     generate_pungi_compose,

+     validate_pungi_compose,

+     koji_get_inherited_tags,

+ )

  from odcs.server.utils import makedirs

  import odcs.server.backend

  from .utils import ModelsBaseTest, AnyStringWith
@@ -46,11 +51,16 @@ 

  

  

  class TestBackend(ModelsBaseTest):

- 

      def test_resolve_compose_repo(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.REPO, os.path.join(thisdir, "repo"),

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.REPO,

+             os.path.join(thisdir, "repo"),

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          db.session.commit()

  

          resolve_compose(c)
@@ -62,28 +72,42 @@ 

      @mock_mbs()

      def test_resolve_compose_module(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleA:f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

          resolve_compose(c)

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source,

-                          ' '.join(["moduleA:f26:20170809000000:00000000",

-                                    "moduleB:f26:20170808000000:00000000",

-                                    "moduleC:f26:20170807000000:00000000",

-                                    "moduleD:f26:20170806000000:00000000"]))

+         self.assertEqual(

+             c.source,

+             " ".join(

+                 [

+                     "moduleA:f26:20170809000000:00000000",

+                     "moduleB:f26:20170808000000:00000000",

+                     "moduleC:f26:20170807000000:00000000",

+                     "moduleD:f26:20170806000000:00000000",

+                 ]

+             ),

+         )

  

      @mock_mbs()

      def test_resolve_compose_module_include_done_modules(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "testmodule:master",

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=COMPOSE_FLAGS["include_done_modules"])

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=COMPOSE_FLAGS["include_done_modules"],

+         )

          db.session.commit()

  

          resolve_compose(c)
@@ -95,9 +119,13 @@ 

      @mock_mbs()

      def test_resolve_compose_module_include_done_modules_full_nsvc(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "testmodule:master:20180515074419:00000000",

-             COMPOSE_RESULTS["repository"], 3600)

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

          resolve_compose(c)
@@ -109,83 +137,121 @@ 

      @mock_mbs()

      def test_resolve_compose_module_devel(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleA:f26 moduleA-devel:f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

          resolve_compose(c)

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source,

-                          ' '.join(["moduleA-devel:f26:20170809000000:00000000",

-                                    "moduleA:f26:20170809000000:00000000",

-                                    "moduleB:f26:20170808000000:00000000",

-                                    "moduleC:f26:20170807000000:00000000",

-                                    "moduleD:f26:20170806000000:00000000"]))

+         self.assertEqual(

+             c.source,

+             " ".join(

+                 [

+                     "moduleA-devel:f26:20170809000000:00000000",

+                     "moduleA:f26:20170809000000:00000000",

+                     "moduleB:f26:20170808000000:00000000",

+                     "moduleC:f26:20170807000000:00000000",

+                     "moduleD:f26:20170806000000:00000000",

+                 ]

+             ),

+         )

  

      @mock_mbs()

      def test_resolve_compose_module_devel_deps_resolving(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleA-devel:f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

          resolve_compose(c)

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source,

-                          ' '.join(["moduleA-devel:f26:20170809000000:00000000",

-                                    "moduleA:f26:20170809000000:00000000",

-                                    "moduleB:f26:20170808000000:00000000",

-                                    "moduleC:f26:20170807000000:00000000",

-                                    "moduleD:f26:20170806000000:00000000"]))

+         self.assertEqual(

+             c.source,

+             " ".join(

+                 [

+                     "moduleA-devel:f26:20170809000000:00000000",

+                     "moduleA:f26:20170809000000:00000000",

+                     "moduleB:f26:20170808000000:00000000",

+                     "moduleC:f26:20170807000000:00000000",

+                     "moduleD:f26:20170806000000:00000000",

+                 ]

+             ),

+         )

  

      @mock_mbs()

      def test_resolve_compose_module_multiple_contexts_no_deps(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "testcontexts:master:1",

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=COMPOSE_FLAGS["no_deps"])

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=COMPOSE_FLAGS["no_deps"],

+         )

          db.session.commit()

  

          resolve_compose(c)

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source,

-                          " ".join(["testcontexts:master:1:a",

-                                    "testcontexts:master:1:b"]))

+         self.assertEqual(

+             c.source, " ".join(["testcontexts:master:1:a", "testcontexts:master:1:b"])

+         )

  

      @mock_mbs()

      def test_resolve_compose_module_multiple_contexts_deps(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "testcontexts:master:1",

-             COMPOSE_RESULTS["repository"], 3600)

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

          resolve_compose(c)

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source,

-                          " ".join(["parent:master:1:a",

-                                    "parent:master:1:b",

-                                    "testcontexts:master:1:a",

-                                    "testcontexts:master:1:b"]))

+         self.assertEqual(

+             c.source,

+             " ".join(

+                 [

+                     "parent:master:1:a",

+                     "parent:master:1:b",

+                     "testcontexts:master:1:a",

+                     "testcontexts:master:1:b",

+                 ]

+             ),

+         )

  

      @mock_mbs()

      def test_resolve_compose_module_no_deps(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleA:f26 moduleA:f26",

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=COMPOSE_FLAGS["no_deps"])

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=COMPOSE_FLAGS["no_deps"],

+         )

          db.session.commit()

  

          resolve_compose(c)
@@ -197,10 +263,14 @@ 

      @mock_mbs()

      def expect_module_lookup_error(self, source, match, flags=0):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              source,

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=flags)

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=flags,

+         )

          db.session.commit()

  

          with six.assertRaisesRegex(self, ModuleLookupError, match):
@@ -209,28 +279,42 @@ 

      @mock_mbs(1)

      def test_resolve_compose_module_mmdv1(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleA:f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

          resolve_compose(c)

          db.session.commit()

  

          c = db.session.query(Compose).filter(Compose.id == 1).one()

-         self.assertEqual(c.source,

-                          ' '.join(["moduleA:f26:20170809000000:00000000",

-                                    "moduleB:f26:20170808000000:00000000",

-                                    "moduleC:f26:20170807000000:00000000",

-                                    "moduleD:f26:20170806000000:00000000"]))

+         self.assertEqual(

+             c.source,

+             " ".join(

+                 [

+                     "moduleA:f26:20170809000000:00000000",

+                     "moduleB:f26:20170808000000:00000000",

+                     "moduleC:f26:20170807000000:00000000",

+                     "moduleD:f26:20170806000000:00000000",

+                 ]

+             ),

+         )

  

      @mock_mbs(1)

      def test_resolve_compose_module_no_deps_mmdv1(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleA:f26 moduleA:f26",

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=COMPOSE_FLAGS["no_deps"])

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=COMPOSE_FLAGS["no_deps"],

+         )

          db.session.commit()

  

          resolve_compose(c)
@@ -242,35 +326,41 @@ 

      @mock_mbs(1)

      def expect_module_lookup_error_mmdv1(self, source, match, flags=0):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              source,

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=flags)

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=flags,

+         )

          db.session.commit()

  

          with six.assertRaisesRegex(self, ModuleLookupError, match):

              resolve_compose(c)

  

      def test_resolve_compose_module_not_found(self):

-         self.expect_module_lookup_error("moduleA:f30",

-                                         "Failed to find")

+         self.expect_module_lookup_error("moduleA:f30", "Failed to find")

  

      def test_resolve_compose_module_not_found2(self):

-         self.expect_module_lookup_error("moduleA:f26:00000000000000",

-                                         "Failed to find")

+         self.expect_module_lookup_error("moduleA:f26:00000000000000", "Failed to find")

  

      def test_resolve_compose_module_conflict(self):

          self.expect_module_lookup_error(

-             "moduleA:f26:20170809000000 moduleA:f26:20170805000000",

-             "conflicts with")

+             "moduleA:f26:20170809000000 moduleA:f26:20170805000000", "conflicts with"

+         )

  

      @mock_mbs()

      def test_resolve_compose_module_not_conflict(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleB:f26 moduleB:f27",

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=COMPOSE_FLAGS["no_deps"])

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=COMPOSE_FLAGS["no_deps"],

+         )

          db.session.commit()

  

          resolve_compose(c)
@@ -278,10 +368,14 @@ 

      @mock_mbs(1)

      def test_resolve_compose_module_not_conflict_mmdv1(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE,

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

              "moduleB:f26 moduleB:f27",

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=COMPOSE_FLAGS["no_deps"])

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=COMPOSE_FLAGS["no_deps"],

+         )

          db.session.commit()

  

          resolve_compose(c)
@@ -289,18 +383,24 @@ 

      def test_resolve_compose_module_dep_not_found(self):

          self.expect_module_lookup_error(

              "moduleB:f26 moduleB:f27",

-             "Failed to find module moduleC:f27 in ready state in the MBS.")

+             "Failed to find module moduleC:f27 in ready state in the MBS.",

+         )

  

      @patch("odcs.server.backend.create_koji_session")

-     def test_resolve_compose_repo_no_override_koji_event(

-             self, create_koji_session):

+     def test_resolve_compose_repo_no_override_koji_event(self, create_koji_session):

          koji_session = MagicMock()

          create_koji_session.return_value = koji_session

          koji_session.getLastEvent.return_value = {"id": 123}

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          c.koji_event = 1

          db.session.commit()

  
@@ -312,13 +412,25 @@ 

  

      def test_get_reusable_compose(self):

          old_c = Compose.create(

-             db.session, "me", PungiSourceType.REPO, os.path.join(thisdir, "repo"),

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.REPO,

+             os.path.join(thisdir, "repo"),

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          resolve_compose(old_c)

          old_c.state = COMPOSE_STATES["done"]

          c = Compose.create(

-             db.session, "me", PungiSourceType.REPO, os.path.join(thisdir, "repo"),

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.REPO,

+             os.path.join(thisdir, "repo"),

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          resolve_compose(c)

          db.session.add(old_c)

          db.session.add(c)
@@ -331,26 +443,39 @@ 

          koji_session = MagicMock()

          koji_session.getTag.return_value = None

  

-         with six.assertRaisesRegex(self, ValueError, 'Unknown Koji tag foo.'):

+         with six.assertRaisesRegex(self, ValueError, "Unknown Koji tag foo."):

              koji_get_inherited_tags(koji_session, "foo")

  

      @patch("odcs.server.backend.koji_get_inherited_tags")

      @patch("odcs.server.backend.create_koji_session")

      def test_get_reusable_tag_compose(

-             self, create_koji_session, koji_get_inherited_tags):

+         self, create_koji_session, koji_get_inherited_tags

+     ):

          koji_get_inherited_tags.return_value = ["foo", "bar"]

          koji_session = MagicMock()

          create_koji_session.return_value = koji_session

          koji_session.tagChangedSinceEvent.return_value = False

  

          old_c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          old_c.koji_event = 1

          old_c.state = COMPOSE_STATES["done"]

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          c.koji_event = 2

          db.session.add(old_c)

          db.session.add(c)
@@ -362,20 +487,33 @@ 

      @patch("odcs.server.backend.koji_get_inherited_tags")

      @patch("odcs.server.backend.create_koji_session")

      def test_get_reusable_tag_compose_none_koji_event(

-             self, create_koji_session, koji_get_inherited_tags):

+         self, create_koji_session, koji_get_inherited_tags

+     ):

          koji_get_inherited_tags.return_value = ["foo", "bar"]

          koji_session = MagicMock()

          create_koji_session.return_value = koji_session

          koji_session.tagChangedSinceEvent.return_value = False

  

          old_c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          old_c.koji_event = None

          old_c.state = COMPOSE_STATES["done"]

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          c.koji_event = 2

          db.session.add(old_c)

          db.session.add(c)
@@ -387,20 +525,33 @@ 

      @patch("odcs.server.backend.koji_get_inherited_tags")

      @patch("odcs.server.backend.create_koji_session")

      def test_get_reusable_tag_compose_tag_changed(

-             self, create_koji_session, koji_get_inherited_tags):

+         self, create_koji_session, koji_get_inherited_tags

+     ):

          koji_get_inherited_tags.return_value = ["foo", "bar"]

          koji_session = MagicMock()

          create_koji_session.return_value = koji_session

          koji_session.tagChangedSinceEvent.return_value = True

  

          old_c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          old_c.koji_event = 1

          old_c.state = COMPOSE_STATES["done"]

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          c.koji_event = 2

          db.session.add(old_c)

          db.session.add(c)
@@ -412,20 +563,33 @@ 

      @patch("odcs.server.backend.koji_get_inherited_tags")

      @patch("odcs.server.backend.create_koji_session")

      def test_get_reusable_tag_compose_renew(

-             self, create_koji_session, koji_get_inherited_tags):

+         self, create_koji_session, koji_get_inherited_tags

+     ):

          koji_get_inherited_tags.return_value = ["foo", "bar"]

          koji_session = MagicMock()

          create_koji_session.return_value = koji_session

          koji_session.tagChangedSinceEvent.return_value = False

  

          old_c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          old_c.koji_event = 10

          old_c.state = COMPOSE_STATES["done"]

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          # c.koji_event is lower than old_c.koji_event, because "c" is actually

          # older than old_c and we are testing that its renewal does not reuse

          # the newer "old_c" compose.
@@ -439,9 +603,16 @@ 

  

      def test_get_reusable_compose_attrs_not_the_same(self):

          old_c = Compose.create(

-             db.session, "me", PungiSourceType.REPO, os.path.join(thisdir, "repo"),

-             COMPOSE_RESULTS["repository"], 3600, packages="ed", sigkeys="123",

-             builds="foo-1-1")

+             db.session,

+             "me",

+             PungiSourceType.REPO,

+             os.path.join(thisdir, "repo"),

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+             sigkeys="123",

+             builds="foo-1-1",

+         )

          old_c.state = COMPOSE_STATES["done"]

          resolve_compose(old_c)

          db.session.add(old_c)
@@ -462,9 +633,16 @@ 

          attrs["target_dir"] = "private"

          for attr, value in attrs.items():

              c = Compose.create(

-                 db.session, "me", PungiSourceType.REPO, os.path.join(thisdir, "repo"),

-                 COMPOSE_RESULTS["repository"], 3600, packages="ed", sigkeys="123",

-                 builds="foo-1-1")

+                 db.session,

+                 "me",

+                 PungiSourceType.REPO,

+                 os.path.join(thisdir, "repo"),

+                 COMPOSE_RESULTS["repository"],

+                 3600,

+                 packages="ed",

+                 sigkeys="123",

+                 builds="foo-1-1",

+             )

              setattr(c, attr, value)

  

              # Do not resolve compose for non-existing source and in case we
@@ -479,8 +657,7 @@ 

  

      @patch("odcs.server.pulp.Pulp._rest_post")

      @patch("odcs.server.backend._write_repo_file")

-     def test_generate_pulp_compose(

-             self, _write_repo_file, pulp_rest_post):

+     def test_generate_pulp_compose(self, _write_repo_file, pulp_rest_post):

          pulp_rest_post.return_value = [

              {

                  "notes": {
@@ -508,14 +685,20 @@ 

                      "signatures": "SIG1,SIG3",

                      "product_versions": "",

                  }

-             }

+             },

          ]

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1 foo-2 foo-3",

-             COMPOSE_RESULTS["repository"], 3600)

-         with patch.object(odcs.server.backend.conf, 'pulp_server_url',

-                           "https://localhost/"):

+             db.session,

+             "me",

+             PungiSourceType.PULP,

+             "foo-1 foo-2 foo-3",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

+         with patch.object(

+             odcs.server.backend.conf, "pulp_server_url", "https://localhost/"

+         ):

              generate_pulp_compose(c)

  

          expected_query = {
@@ -523,12 +706,11 @@ 

                  "fields": ["notes"],

                  "filters": {

                      "notes.content_set": {"$in": ["foo-1", "foo-2", "foo-3"]},

-                     "notes.include_in_download_service": "True"

-                 }

+                     "notes.include_in_download_service": "True",

+                 },

              }

          }

-         pulp_rest_post.assert_called_once_with('repositories/search/',

-                                                expected_query)

+         pulp_rest_post.assert_called_once_with("repositories/search/", expected_query)

  

          expected_repofile = """

  [foo-1]
@@ -552,7 +734,7 @@ 

          _write_repo_file.assert_called_once_with(c, expected_repofile)

  

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.state_reason, 'Compose is generated successfully')

+         self.assertEqual(c.state_reason, "Compose is generated successfully")

          self.assertEqual(len(c.arches.split(" ")), 2)

          self.assertEqual(set(c.arches.split(" ")), set(["x86_64", "ppc64"]))

          self.assertEqual(len(c.sigkeys.split(" ")), 3)
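
The quote changes sprinkled through these hunks (``'Compose is generated successfully'`` becoming ``"Compose is generated successfully"``) come from black's string normalization: it prefers double quotes and only keeps single quotes when switching would force extra escapes. For example:

```python
import black

mode = black.FileMode()

# Plain strings are normalized to double quotes.
src1 = "reason = 'Compose is generated successfully'\n"
print(black.format_str(src1, mode=mode), end="")

# A string that itself contains double quotes keeps its single quotes,
# since normalizing it would require escaping.
src2 = "msg = 'say \"hi\"'\n"
print(black.format_str(src2, mode=mode), end="")
```
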
@@ -562,45 +744,50 @@ 

      @patch("odcs.server.backend._write_repo_file")

      @patch("os.symlink")

      def test_generate_pulp_compose_include_inpublished_pulp_repos_passed(

-             self, symlink, _write_repo_file, pulp_rest_post):

+         self, symlink, _write_repo_file, pulp_rest_post

+     ):

          pulp_rest_post.return_value = [

              {

                  "notes": {

                      "relative_url": "content/1/x86_64/os",

                      "content_set": "foo-1",

                      "arch": "ppc64",

-                     "signatures": "SIG1,SIG2"

+                     "signatures": "SIG1,SIG2",

                  },

              },

          ]

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1 foo-2",

-             COMPOSE_RESULTS["repository"], 3600,

-             flags=COMPOSE_FLAGS["include_unpublished_pulp_repos"])

+             db.session,

+             "me",

+             PungiSourceType.PULP,

+             "foo-1 foo-2",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=COMPOSE_FLAGS["include_unpublished_pulp_repos"],

+         )

          db.session.add(c)

          db.session.commit()

  

-         with patch.object(odcs.server.backend.conf, 'pulp_server_url',

-                           "https://localhost/"):

+         with patch.object(

+             odcs.server.backend.conf, "pulp_server_url", "https://localhost/"

+         ):

              generate_compose(1)

  

          expected_query = {

              "criteria": {

                  "fields": ["notes"],

-                 "filters": {

-                     "notes.content_set": {"$in": ["foo-1", "foo-2"]},

-                 }

+                 "filters": {"notes.content_set": {"$in": ["foo-1", "foo-2"]}},

              }

          }

-         pulp_rest_post.assert_called_once_with('repositories/search/',

-                                                expected_query)

+         pulp_rest_post.assert_called_once_with("repositories/search/", expected_query)

          symlink.assert_not_called()

  

      @patch("odcs.server.pulp.Pulp._rest_post")

      @patch("odcs.server.backend._write_repo_file")

      def test_generate_pulp_compose_content_set_not_found(

-             self, _write_repo_file, pulp_rest_post):

+         self, _write_repo_file, pulp_rest_post

+     ):

          pulp_rest_post.return_value = [

              {

                  "notes": {
@@ -614,13 +801,19 @@ 

          ]

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1 foo-2",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.PULP,

+             "foo-1 foo-2",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  

-         with patch.object(odcs.server.backend.conf, 'pulp_server_url',

-                           "https://localhost/"):

+         with patch.object(

+             odcs.server.backend.conf, "pulp_server_url", "https://localhost/"

+         ):

              generate_compose(1)

  

          expected_query = {
@@ -628,22 +821,25 @@ 

                  "fields": ["notes"],

                  "filters": {

                      "notes.content_set": {"$in": ["foo-1", "foo-2"]},

-                     "notes.include_in_download_service": "True"

-                 }

+                     "notes.include_in_download_service": "True",

+                 },

              }

          }

-         pulp_rest_post.assert_called_once_with('repositories/search/',

-                                                expected_query)

+         pulp_rest_post.assert_called_once_with("repositories/search/", expected_query)

          _write_repo_file.assert_not_called()

  

          c1 = Compose.query.filter(Compose.id == 1).one()

          self.assertEqual(c1.state, COMPOSE_STATES["failed"])

-         six.assertRegex(self, c1.state_reason, r'Error while generating compose: Failed to find all the content_sets.*')

+         six.assertRegex(

+             self,

+             c1.state_reason,

+             r"Error while generating compose: Failed to find all the content_sets.*",

+         )

  

      @patch("odcs.server.pulp.Pulp._rest_post")

      @patch("odcs.server.backend._write_repo_file")

      def test_generate_pulp_compose_content_set_not_found_allow_absent(

-             self, _write_repo_file, pulp_rest_post

+         self, _write_repo_file, pulp_rest_post

      ):

          pulp_rest_post.return_value = [

              {
@@ -658,15 +854,20 @@ 

          ]

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1 foo-2",

-             COMPOSE_RESULTS["repository"], 3600,

+             db.session,

+             "me",

+             PungiSourceType.PULP,

+             "foo-1 foo-2",

+             COMPOSE_RESULTS["repository"],

+             3600,

              flags=COMPOSE_FLAGS["ignore_absent_pulp_repos"],

          )

          db.session.add(c)

          db.session.commit()

  

-         with patch.object(odcs.server.backend.conf, 'pulp_server_url',

-                           "https://localhost/"):

+         with patch.object(

+             odcs.server.backend.conf, "pulp_server_url", "https://localhost/"

+         ):

              generate_compose(1)

  

          expected_query = {
@@ -674,12 +875,11 @@ 

                  "fields": ["notes"],

                  "filters": {

                      "notes.content_set": {"$in": ["foo-1", "foo-2"]},

-                     "notes.include_in_download_service": "True"

-                 }

+                     "notes.include_in_download_service": "True",

+                 },

              }

          }

-         pulp_rest_post.assert_called_once_with('repositories/search/',

-                                                expected_query)

+         pulp_rest_post.assert_called_once_with("repositories/search/", expected_query)

          expected_repofile = """

  [foo-1]

  name=foo-1
@@ -697,14 +897,21 @@ 

      @patch("odcs.server.backend.generate_pungi_compose")

      @patch("odcs.server.pungi.PungiLogs.get_error_string")

      def test_generate_compose_exception(

-             self, get_error_string, generate_pungi_compose, resolve_compose):

+         self, get_error_string, generate_pungi_compose, resolve_compose

+     ):

          get_error_string.return_value = "Compose failed for unknown reason."

          generate_pungi_compose.side_effect = RuntimeError(

-             "Expected exception, see %s" % os.path.join(conf.target_dir, "foo.log"))

+             "Expected exception, see %s" % os.path.join(conf.target_dir, "foo.log")

+         )

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo-1",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo-1",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  
@@ -715,22 +922,30 @@ 

          c1 = Compose.query.filter(Compose.id == 1).one()

          self.assertEqual(c1.state, COMPOSE_STATES["failed"])

          six.assertRegex(

-             self, c1.state_reason,

-             r'Error while generating compose: Expected exception, see '

-             'http://localhost/odcs/foo.log\n'

-             'Compose failed for unknown reason*')

+             self,

+             c1.state_reason,

+             r"Error while generating compose: Expected exception, see "

+             "http://localhost/odcs/foo.log\n"

+             "Compose failed for unknown reason*",

+         )

  

      @patch("odcs.server.backend.resolve_compose")

      @patch("odcs.server.backend.generate_pungi_compose")

      @patch("odcs.server.pungi.PungiLogs.get_error_string")

      def test_generate_compose_pungi_logs_exceptions(

-             self, get_error_string, generate_pungi_compose, resolve_compose):

+         self, get_error_string, generate_pungi_compose, resolve_compose

+     ):

          get_error_string.side_effect = RuntimeError("PungiLogs Expected exception")

          generate_pungi_compose.side_effect = RuntimeError("Expected exception")

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo-1",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo-1",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  
@@ -739,70 +954,91 @@ 

          c1 = Compose.query.filter(Compose.id == 1).one()

          self.assertEqual(c1.state, COMPOSE_STATES["failed"])

          six.assertRegex(

-             self, c1.state_reason,

-             r'Error while generating compose: Expected exception*')

+             self,

+             c1.state_reason,

+             r"Error while generating compose: Expected exception*",

+         )

  

-     @patch('odcs.server.backend.tag_changed', return_value=True)

-     @patch('odcs.server.backend.create_koji_session')

+     @patch("odcs.server.backend.tag_changed", return_value=True)

+     @patch("odcs.server.backend.create_koji_session")

      def test_resolve_compose_from_koji_tag_get_last_event_if_tag_changed(

-             self, create_koji_session, tag_changed):

+         self, create_koji_session, tag_changed

+     ):

          session = create_koji_session.return_value

-         fake_koji_event = {'id': 234567}

+         fake_koji_event = {"id": 234567}

          session.getLastEvent.return_value = fake_koji_event

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo-1",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo-1",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  

-         with patch.dict('odcs.server.backend.LAST_EVENTS_CACHE', {'foo-1': 123456}):

+         with patch.dict("odcs.server.backend.LAST_EVENTS_CACHE", {"foo-1": 123456}):

              resolve_compose(c)

-             c.koji_event = fake_koji_event['id']

+             c.koji_event = fake_koji_event["id"]

  

-     @patch('odcs.server.backend.tag_changed')

-     @patch('odcs.server.backend.create_koji_session')

+     @patch("odcs.server.backend.tag_changed")

+     @patch("odcs.server.backend.create_koji_session")

      def test_resolve_compose_from_koji_tag_reuse_koji_event_if_tag_not_changed(

-             self, create_koji_session, tag_changed):

+         self, create_koji_session, tag_changed

+     ):

          tag_changed.return_value = False

          session = create_koji_session.return_value

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo-1",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo-1",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  

-         with patch.dict('odcs.server.backend.LAST_EVENTS_CACHE', {'foo-1': 123456}):

+         with patch.dict("odcs.server.backend.LAST_EVENTS_CACHE", {"foo-1": 123456}):

              resolve_compose(c)

              c.koji_event = 123456

  

              session.getLastEvent.assert_not_called()

  

-     @patch('odcs.server.backend.tag_changed')

-     @patch('odcs.server.backend.create_koji_session')

+     @patch("odcs.server.backend.tag_changed")

+     @patch("odcs.server.backend.create_koji_session")

      def test_resolve_compose_from_koji_tag_get_last_koji_event_if_tag_not_cached(

-             self, create_koji_session, tag_changed):

-         fake_koji_event = {'id': 789065}

+         self, create_koji_session, tag_changed

+     ):

+         fake_koji_event = {"id": 789065}

          session = create_koji_session.return_value

          session.getLastEvent.return_value = fake_koji_event

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "foo-1",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "foo-1",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  

-         with patch.dict('odcs.server.backend.LAST_EVENTS_CACHE', {'bar-2': 123456}):

+         with patch.dict("odcs.server.backend.LAST_EVENTS_CACHE", {"bar-2": 123456}):

              resolve_compose(c)

-             c.koji_event = fake_koji_event['id']

+             c.koji_event = fake_koji_event["id"]

  

              tag_changed.assert_not_called()

  

-     @patch('odcs.server.mbs.MBS.validate_module_list')

-     @patch('odcs.server.mbs.MBS.get_latest_modules')

+     @patch("odcs.server.mbs.MBS.validate_module_list")

+     @patch("odcs.server.mbs.MBS.get_latest_modules")

      def test_resolve_compose_module_filter_base_module(

-             self, get_latest_modules, validate_module_list):

+         self, get_latest_modules, validate_module_list

+     ):

          modules = [

              {"name": "foo", "stream": "0", "version": 1, "context": "x"},

              {"name": "bar", "stream": "0", "version": 1, "context": "y"},
@@ -813,8 +1049,13 @@ 

          ]

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.MODULE, "foo:0 bar:0",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

+             "foo:0 bar:0",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  
@@ -825,54 +1066,78 @@ 

  

          # Now try removing "platform" from the conf.base_module_names, so it

          # should appear in a compose source.

-         with patch.object(odcs.server.config.Config, 'base_module_names',

-                           new=["random_name"]):

+         with patch.object(

+             odcs.server.config.Config, "base_module_names", new=["random_name"]

+         ):

              resolve_compose(c)

              self.assertEqual(c.source, "bar:0:1:y foo:0:1:x platform:0:1:z")

  

-     @patch('odcs.server.pungi_compose.PungiCompose.get_rpms_data')

+     @patch("odcs.server.pungi_compose.PungiCompose.get_rpms_data")

      def test_resolve_compose_pungi_compose_source_type(self, get_rpms_data):

          get_rpms_data.return_value = {

-             'sigkeys': set(['sigkey1', None]),

-             'arches': set(['x86_64']),

-             'builds': {

-                 'flatpak-rpm-macros-29-6.module+125+c4f5c7f2': set([

-                     'flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.src',

-                     'flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.x86_64']),

-                 'flatpak-runtime-config-29-4.module+125+c4f5c7f2': set([

-                     'flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.src',

-                     'flatpak-runtime-config2-0:29-4.module+125+c4f5c7f2.x86_64'])

-             }

+             "sigkeys": set(["sigkey1", None]),

+             "arches": set(["x86_64"]),

+             "builds": {

+                 "flatpak-rpm-macros-29-6.module+125+c4f5c7f2": set(

+                     [

+                         "flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.src",

+                         "flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.x86_64",

+                     ]

+                 ),

+                 "flatpak-runtime-config-29-4.module+125+c4f5c7f2": set(

+                     [

+                         "flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.src",

+                         "flatpak-runtime-config2-0:29-4.module+125+c4f5c7f2.x86_64",

+                     ]

+                 ),

+             },

          }

  

          c = Compose.create(

-             db.session, "me", PungiSourceType.PUNGI_COMPOSE,

+             db.session,

+             "me",

+             PungiSourceType.PUNGI_COMPOSE,

              "http://localhost/compose/Temporary",

-             COMPOSE_RESULTS["repository"], 3600)

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

  

          resolve_compose(c)

          self.assertEqual(c.sigkeys.split(" "), ["sigkey1", ""])

          self.assertEqual(c.arches.split(" "), ["x86_64"])

-         self.assertEqual(set(c.builds.split(" ")), set([

-             'flatpak-rpm-macros-29-6.module+125+c4f5c7f2',

-             'flatpak-runtime-config-29-4.module+125+c4f5c7f2']))

-         self.assertEqual(set(c.packages.split(" ")), set([

-             'flatpak-rpm-macros',

-             'flatpak-runtime-config',

-             'flatpak-runtime-config2']))

+         self.assertEqual(

+             set(c.builds.split(" ")),

+             set(

+                 [

+                     "flatpak-rpm-macros-29-6.module+125+c4f5c7f2",

+                     "flatpak-runtime-config-29-4.module+125+c4f5c7f2",

+                 ]

+             ),

+         )

+         self.assertEqual(

+             set(c.packages.split(" ")),

+             set(

+                 [

+                     "flatpak-rpm-macros",

+                     "flatpak-runtime-config",

+                     "flatpak-runtime-config2",

+                 ]

+             ),

+         )
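
Note that black never touches semantics: the verbose ``set([...])`` calls above survive as calls, not set literals, because black guarantees the reformatted source parses to an equivalent AST and verifies that invariant on every run. Only layout, quotes, and trailing commas change. A quick sketch:

```python
import black

# Only whitespace and quotes change; the set([...]) call is preserved and
# is never rewritten into a {...} literal.
src = "builds = set( ['flatpak-rpm-macros', 'flatpak-runtime-config'] )\n"
print(black.format_str(src, mode=black.FileMode()), end="")
# -> builds = set(["flatpak-rpm-macros", "flatpak-runtime-config"])
```
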

  

  

  class TestGeneratePungiCompose(ModelsBaseTest):

- 

      def setUp(self):

          super(TestGeneratePungiCompose, self).setUp()

  

          self.patch_resolve_compose = patch("odcs.server.backend.resolve_compose")

          self.resolve_compose = self.patch_resolve_compose.start()

  

-         self.patch_get_reusable_compose = patch("odcs.server.backend.get_reusable_compose")

+         self.patch_get_reusable_compose = patch(

+             "odcs.server.backend.get_reusable_compose"

+         )

          self.get_reusable_compose = self.patch_get_reusable_compose.start()

          self.get_reusable_compose.return_value = False

  
@@ -888,7 +1153,9 @@ 

          self.patch_update_cache = patch("odcs.server.backend.KojiTagCache.update_cache")

          self.update_cache = self.patch_update_cache.start()

  

-         self.patch_validate_pungi_compose = patch("odcs.server.backend.validate_pungi_compose")

+         self.patch_validate_pungi_compose = patch(

+             "odcs.server.backend.validate_pungi_compose"

+         )

          self.validate_pungi_compose = self.patch_validate_pungi_compose.start()

  

          # Mocked method to store Pungi.pungi_cfg to self.pungi_cfg, so we can
@@ -918,10 +1185,17 @@ 

  

      def test_generate_pungi_compose(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1 pkg2 pkg3',

-             arches="x86_64 s390", multilib_arches="i686 x86_64",

-             multilib_method=1)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1 pkg2 pkg3",

+             arches="x86_64 s390",

+             multilib_arches="i686 x86_64",

+             multilib_method=1,

+         )

          c.id = 1

  

          generate_pungi_compose(c)
@@ -937,15 +1211,24 @@ 

          self.assertEqual(self.pungi_config.gather_method, "deps")

          self.assertEqual(self.pungi_config.pkgset_koji_inherit, True)

          self.assertEqual(set(self.pungi_config.arches), set(["x86_64", "s390"]))

-         self.assertEqual(set(self.pungi_config.multilib_arches), set(["i686", "x86_64"]))

+         self.assertEqual(

+             set(self.pungi_config.multilib_arches), set(["i686", "x86_64"])

+         )

          self.assertEqual(self.pungi_config.multilib_method, ["runtime"])

  

      def test_generate_pungi_compose_multiarch_arches_None(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1 pkg2 pkg3',

-             arches="x86_64 s390", multilib_arches=None,

-             multilib_method=None)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1 pkg2 pkg3",

+             arches="x86_64 s390",

+             multilib_arches=None,

+             multilib_method=None,

+         )

          c.id = 1

  

          generate_pungi_compose(c)
@@ -954,9 +1237,15 @@ 

  

      def test_generate_pungi_compose_nodeps(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1 pkg2 pkg3',

-             flags=COMPOSE_FLAGS["no_deps"])

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1 pkg2 pkg3",

+             flags=COMPOSE_FLAGS["no_deps"],

+         )

          c.id = 1

  

          generate_pungi_compose(c)
@@ -965,9 +1254,15 @@ 

  

      def test_generate_pungi_compose_noinheritance(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1 pkg2 pkg3',

-             flags=COMPOSE_FLAGS["no_inheritance"])

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1 pkg2 pkg3",

+             flags=COMPOSE_FLAGS["no_inheritance"],

+         )

          c.id = 1

  

          generate_pungi_compose(c)
@@ -976,9 +1271,15 @@ 

  

      def test_generate_pungi_compose_builds(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, builds='foo-1-1 bar-1-1',

-             flags=COMPOSE_FLAGS["no_inheritance"])

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             builds="foo-1-1 bar-1-1",

+             flags=COMPOSE_FLAGS["no_inheritance"],

+         )

          c.id = 1

  

          generate_pungi_compose(c)
@@ -986,60 +1287,87 @@ 

  

      def test_generate_pungi_compose_source_type_build(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.BUILD, "x",

-             COMPOSE_RESULTS["repository"], 60, builds='foo-1-1 bar-1-1',

-             flags=COMPOSE_FLAGS["no_inheritance"])

+             db.session,

+             "me",

+             PungiSourceType.BUILD,

+             "x",

+             COMPOSE_RESULTS["repository"],

+             60,

+             builds="foo-1-1 bar-1-1",

+             flags=COMPOSE_FLAGS["no_inheritance"],

+         )

          c.id = 1

  

          generate_pungi_compose(c)

          self.assertEqual(self.pungi_config.koji_tag, None)

          self.assertEqual(self.pungi_config.builds, ["foo-1-1", "bar-1-1"])

  

-     @patch.object(odcs.server.config.Config, 'raw_config_urls',

-                   new={

-                       "pungi_cfg": {

-                           "url": "git://localhost/test.git",

-                           "config_filename": "pungi.conf"}

-                   })

+     @patch.object(

+         odcs.server.config.Config,

+         "raw_config_urls",

+         new={

+             "pungi_cfg": {

+                 "url": "git://localhost/test.git",

+                 "config_filename": "pungi.conf",

+             }

+         },

+     )

      @patch("odcs.server.utils.makedirs")

      @patch("os.symlink")

      @patch("os.unlink")

      @patch("odcs.server.pungi.PungiLogs.get_config_dump")

-     def test_generate_pungi_compose_raw_config(self, config_dump, unlink, symlink, makedirs):

+     def test_generate_pungi_compose_raw_config(

+         self, config_dump, unlink, symlink, makedirs

+     ):

          config_dump.return_value = "fake\npungi\nconf\n"

          c = Compose.create(

-             db.session, "me", PungiSourceType.RAW_CONFIG, "pungi_cfg#hash",

-             COMPOSE_RESULTS["repository"], 60)

+             db.session,

+             "me",

+             PungiSourceType.RAW_CONFIG,

+             "pungi_cfg#hash",

+             COMPOSE_RESULTS["repository"],

+             60,

+         )

          c.compose_type = "production"

          c.pungi_compose_id = "compose-1-10-2020110.n.0"

          c.id = 1

  

          fake_raw_config_urls = {

-             'pungi_cfg': {

-                 'url': 'git://localhost/test.git',

-                 'config_filename': 'pungi.conf'

+             "pungi_cfg": {

+                 "url": "git://localhost/test.git",

+                 "config_filename": "pungi.conf",

              }

          }

-         with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

+         with patch.object(conf, "raw_config_urls", new=fake_raw_config_urls):

              generate_pungi_compose(c)

  

          self.assertEqual(c.pungi_config_dump, "fake\npungi\nconf\n")

-         self.assertEqual(self.pungi_config.pungi_cfg, {

-             'url': 'git://localhost/test.git',

-             'config_filename': 'pungi.conf',

-             'commit': 'hash'

-         })

+         self.assertEqual(

+             self.pungi_config.pungi_cfg,

+             {

+                 "url": "git://localhost/test.git",

+                 "config_filename": "pungi.conf",

+                 "commit": "hash",

+             },

+         )

          self.assertEqual(self.old_compose, AnyStringWith("/test_composes/production"))

  

          makedirs.assert_called_once_with(AnyStringWith("/test_composes/production"))

-         symlink.assert_has_calls([

-             call('../odcs-1',

-                  AnyStringWith('/test_composes/production/compose-1-10-2020110.n.0')),

-             call('../odcs-1',

-                  AnyStringWith('/test_composes/production/latest-compose-1')),

-         ])

+         symlink.assert_has_calls(

+             [

+                 call(

+                     "../odcs-1",

+                     AnyStringWith("/test_composes/production/compose-1-10-2020110.n.0"),

+                 ),

+                 call(

+                     "../odcs-1",

+                     AnyStringWith("/test_composes/production/latest-compose-1"),

+                 ),

+             ]

+         )

          unlink.assert_called_with(

-             AnyStringWith('/test_composes/production/latest-compose-1'))

+             AnyStringWith("/test_composes/production/latest-compose-1")

+         )

  

  

  class TestValidatePungiCompose(ModelsBaseTest):
@@ -1049,17 +1377,23 @@ 

          super(TestValidatePungiCompose, self).setUp()

  

          self.c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1 pkg2 pkg3')

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1 pkg2 pkg3",

+         )

          db.session.commit()

  

          # Remove any previous toplevel_dir.

          if os.path.exists(self.c.toplevel_dir):

              shutil.rmtree(self.c.toplevel_dir)

  

-         compose_dir = os.path.join(self.c.toplevel_dir, 'compose')

-         metadata_dir = os.path.join(compose_dir, 'metadata')

-         self.rpms_metadata = os.path.join(metadata_dir, 'rpms.json')

+         compose_dir = os.path.join(self.c.toplevel_dir, "compose")

+         metadata_dir = os.path.join(compose_dir, "metadata")

+         self.rpms_metadata = os.path.join(metadata_dir, "rpms.json")

          makedirs(metadata_dir)

  

          rm = Rpms()
@@ -1130,11 +1464,11 @@ 

          super(TestValidatePungiCompose, self).tearDown()

  

      def test_missing_packages(self):

-         with six.assertRaisesRegex(self, RuntimeError, 'not present.+pkg3'):

+         with six.assertRaisesRegex(self, RuntimeError, "not present.+pkg3"):

              validate_pungi_compose(self.c)

  

      def test_all_packages_are_included(self):

-         self.c.packages = 'pkg1 pkg1-lib pkg2 pkg2-lib'

+         self.c.packages = "pkg1 pkg1-lib pkg2 pkg2-lib"

          db.session.commit()

  

          validate_pungi_compose(self.c)

@@ -32,7 +32,8 @@ 

          super(TestBackendThread, self).setUp()

  

          self.patch_do_work = patch(

-             "odcs.server.backend.BackendThread.do_work", autospec=True)

+             "odcs.server.backend.BackendThread.do_work", autospec=True

+         )

          self.do_work = self.patch_do_work.start()

  

          self.thread = BackendThread()
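
That closes out the backend tests. The repeated ``with patch.object(...)`` rewrites in this file follow the same wrapping rule as calls and decorators: when the context-manager expression overflows, black wraps inside the call's own parentheses instead of using backslash continuations. Note the indentation counts toward the limit, which is why these lines overflow inside a test method but would not at module level. A sketch with a hypothetical class and method name:

```python
import black

# TestPulp / test_compose are hypothetical; the nesting matters because
# the 8-space indent pushes the with-statement past 88 columns.
src = (
    "class TestPulp:\n"
    "    def test_compose(self):\n"
    "        with patch.object(odcs.server.backend.conf, "
    "'pulp_server_url', 'https://localhost/'):\n"
    "            generate_compose(1)\n"
)

# Black wraps inside the existing parentheses of patch.object(); no
# backslashes or extra parentheses are introduced.
print(black.format_str(src, mode=black.FileMode()), end="")
```
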

file modified
+86 -31
@@ -29,12 +29,10 @@ 

  from odcs.server import db, conf

  from odcs.server.cache import KojiTagCache

  from odcs.server.models import Compose

- from odcs.common.types import (

-     COMPOSE_RESULTS, COMPOSE_STATES, PungiSourceType)

+ from odcs.common.types import COMPOSE_RESULTS, COMPOSE_STATES, PungiSourceType

  

  

  class TestKojiTagCache(ModelsBaseTest):

- 

      def setUp(self):

          super(TestKojiTagCache, self).setUp()

          compose = MagicMock()
@@ -43,64 +41,102 @@ 

  

      def test_cached_compose_dir(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          ret = self.cache.cached_compose_dir(c)

          expected = os.path.join(conf.target_dir, "koji_tag_cache/f26-0--x86_64")

          self.assertEqual(ret, expected)

  

      def test_compose_dir_flags(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600, flags=1)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=1,

+         )

          ret = self.cache.cached_compose_dir(c)

          expected = os.path.join(conf.target_dir, "koji_tag_cache/f26-1--x86_64")

          self.assertEqual(ret, expected)

  

      def test_compose_dir_sigkeys(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600, flags=1,

-             sigkeys="a b c")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=1,

+             sigkeys="a b c",

+         )

          ret = self.cache.cached_compose_dir(c)

          expected = os.path.join(conf.target_dir, "koji_tag_cache/f26-1-a-b-c-x86_64")

          self.assertEqual(ret, expected)

  

      def test_compose_dir_sigkeys_arches_unsorted(self):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600, flags=1,

-             sigkeys="c b a", arches="i686 x86_64")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             flags=1,

+             sigkeys="c b a",

+             arches="i686 x86_64",

+         )

          ret = self.cache.cached_compose_dir(c)

-         expected = os.path.join(conf.target_dir, "koji_tag_cache/f26-1-a-b-c-i686-x86_64")

+         expected = os.path.join(

+             conf.target_dir, "koji_tag_cache/f26-1-a-b-c-i686-x86_64"

+         )

          self.assertEqual(ret, expected)

  

      @patch("os.path.exists")

      def test_is_cached(self, exists):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

  

          for cached in [True, False]:

              exists.return_value = cached

              ret = self.cache.is_cached(c)

              self.assertEqual(ret, cached)

              exists.assert_called_once_with(

-                 os.path.join(conf.target_dir, "koji_tag_cache/f26-0--x86_64"))

+                 os.path.join(conf.target_dir, "koji_tag_cache/f26-0--x86_64")

+             )

              exists.reset_mock()

  

      @patch("shutil.copytree")

      def test_reuse_cached(self, copytree):

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

          self.cache.reuse_cached(c)

          copytree.assert_called_once_with(

              os.path.join(conf.target_dir, "koji_tag_cache/f26-0--x86_64"),

              os.path.join(conf.target_dir, "koji_tag_cache/odcs-1-1-19700101.n.0"),

-             symlinks=True)

+             symlinks=True,

+         )

  

      @patch("shutil.copytree")

      @patch("os.path.realpath")
@@ -108,8 +144,13 @@ 

      def test_update_cache(self, rmtree, realpath, copytree):

          realpath.return_value = "/tmp/real/path"

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(c)

          db.session.commit()

          for done in [True, False]:
@@ -119,7 +160,8 @@ 

                  copytree.assert_called_once_with(

                      "/tmp/real/path",

                      os.path.join(conf.target_dir, "koji_tag_cache/f26-0--x86_64"),

-                     symlinks=True)

+                     symlinks=True,

+                 )

              else:

                  copytree.assert_not_called()

              copytree.reset_mock()
@@ -132,22 +174,31 @@ 

      def test_update_cache_rmtree_if_exists(self, exists, rmtree, realpath, copytree):

          realpath.return_value = "/tmp/real/path"

          c = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          c.state = COMPOSE_STATES["done"]

          db.session.add(c)

          db.session.commit()

  

          self.cache.update_cache(c)

          rmtree.assert_called_once_with(

-             os.path.join(conf.target_dir, "koji_tag_cache/f26-0--x86_64"))

+             os.path.join(conf.target_dir, "koji_tag_cache/f26-0--x86_64")

+         )

  

      @patch("os.listdir")

      @patch("os.path.getmtime")

      @patch("shutil.rmtree")

      @patch("os.path.exists")

-     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

-                   new={"releng-private": "/tmp/private"})

+     @patch.object(

+         odcs.server.config.Config,

+         "extra_target_dirs",

+         new={"releng-private": "/tmp/private"},

+     )

      def test_remove_old_koji_tag_cache_data(self, exists, rmtree, getmtime, listdir):

          exists.return_value = True

          now = time.time()
@@ -159,15 +210,19 @@ 

          self.cache.remove_old_koji_tag_cache_data()

          self.assertEqual(

              rmtree.call_args_list,

-             [call(os.path.join(self.cache.cache_dir, "foo")),

-              call("/tmp/private/koji_tag_cache/foo")])

+             [

+                 call(os.path.join(self.cache.cache_dir, "foo")),

+                 call("/tmp/private/koji_tag_cache/foo"),

+             ],

+         )

  

      @patch("os.listdir")

      @patch("os.path.getmtime")

      @patch("shutil.rmtree")

      @patch("os.path.exists")

      def test_remove_old_koji_tag_cache_data_getmtime_raises(

-             self, exists, rmtree, getmtime, listdir):

+         self, exists, rmtree, getmtime, listdir

+     ):

          exists.return_value = True

          listdir.return_value = ["foo", "bar"]

          getmtime.side_effect = OSError("path does not exist")

@@ -11,23 +11,18 @@ 

  from odcs.server.models import Compose

  

  

- class TestCeleryRouter():

- 

+ class TestCeleryRouter:

      @patch("odcs.server.celery_tasks.get_odcs_compose")

      def test_empty_rule(self, mock_get_compose):

          mock_compose = Mock()

  

-         compose_md = {

-             "source_type": 3

-         }

+         compose_md = {"source_type": 3}

  

          mock_conf = {

              "routing_rules": {

                  "odcs.server.celery_tasks.generate_pungi_compose": {

                      "pungi_composes": {},

-                     "other_composes": {

-                         "source_type": 4,

-                     },

+                     "other_composes": {"source_type": 4},

                  },

              },

              "cleanup_task": "odcs.server.celery_tasks.run_cleanup",
@@ -41,25 +36,22 @@ 

          mock_get_compose.return_value = mock_compose

          args = [[1], {}]

          kwargs = {}

-         queue = tr.route_for_task("odcs.server.celery_tasks.generate_pungi_compose",

-                                   *args, **kwargs)

+         queue = tr.route_for_task(

+             "odcs.server.celery_tasks.generate_pungi_compose", *args, **kwargs

+         )

          assert queue == {"queue": "pungi_composes"}

  

      @patch("odcs.server.celery_tasks.get_odcs_compose")

      def test_default_queue(self, mock_get_compose):

          mock_compose = Mock()

  

-         compose_md = {

-             "source_type": 3

-         }

+         compose_md = {"source_type": 3}

  

          mock_conf = {

              "routing_rules": {

                  "some.other.task": {

                      "pungi_composes": {},

-                     "other_composes": {

-                         "source_type": 4,

-                     },

+                     "other_composes": {"source_type": 4},

                  },

              },

              "cleanup_task": "odcs.server.celery_tasks.run_cleanup",
@@ -73,27 +65,22 @@ 

          mock_get_compose.return_value = mock_compose

          args = [[1], {}]

          kwargs = {}

-         queue = tr.route_for_task("odcs.server.celery_tasks.generate_pungi_compose",

-                                   *args, **kwargs)

+         queue = tr.route_for_task(

+             "odcs.server.celery_tasks.generate_pungi_compose", *args, **kwargs

+         )

          assert queue == {"queue": "default_queue"}

  

      @patch("odcs.server.celery_tasks.get_odcs_compose")

      def test_rule_with_single_property(self, mock_get_compose):

          mock_compose = Mock()

  

-         compose_md = {

-             "source_type": 3

-         }

+         compose_md = {"source_type": 3}

  

          mock_conf = {

              "routing_rules": {

                  "odcs.server.celery_tasks.generate_pungi_compose": {

-                     "pungi_composes": {

-                         "source_type": 3,

-                     },

-                     "other_composes": {

-                         "source_type": 4,

-                     },

+                     "pungi_composes": {"source_type": 3},

+                     "other_composes": {"source_type": 4},

                  },

              },

              "cleanup_task": "odcs.server.celery_tasks.run_cleanup",
@@ -107,8 +94,9 @@ 

          mock_get_compose.return_value = mock_compose

          args = [[1], {}]

          kwargs = {}

-         queue = tr.route_for_task("odcs.server.celery_tasks.generate_pungi_compose",

-                                   *args, **kwargs)

+         queue = tr.route_for_task(

+             "odcs.server.celery_tasks.generate_pungi_compose", *args, **kwargs

+         )

          assert queue == {"queue": "pungi_composes"}

  

      @patch("odcs.server.celery_tasks.get_odcs_compose")
@@ -144,27 +132,22 @@ 

          mock_get_compose.return_value = mock_compose

          args = [[1], {}]

          kwargs = {}

-         queue = tr.route_for_task("odcs.server.celery_tasks.generate_pungi_compose",

-                                   *args, **kwargs)

+         queue = tr.route_for_task(

+             "odcs.server.celery_tasks.generate_pungi_compose", *args, **kwargs

+         )

          assert queue == {"queue": "other_composes"}

  

      @patch("odcs.server.celery_tasks.get_odcs_compose")

      def test_cleanup_queue(self, mock_get_compose):

          mock_compose = Mock()

  

-         compose_md = {

-             "source_type": 3

-         }

+         compose_md = {"source_type": 3}

  

          mock_conf = {

              "routing_rules": {

                  "odcs.server.celery_tasks.generate_pungi_compose": {

-                     "pungi_composes": {

-                         "source_type": 3,

-                     },

-                     "other_composes": {

-                         "source_type": 4,

-                     },

+                     "pungi_composes": {"source_type": 3},

+                     "other_composes": {"source_type": 4},

                  },

              },

              "cleanup_task": "odcs.server.celery_tasks.run_cleanup",
@@ -178,24 +161,21 @@ 

          mock_get_compose.return_value = mock_compose

          args = [[1], {}]

          kwargs = {}

-         queue = tr.route_for_task("odcs.server.celery_tasks.run_cleanup",

-                                   *args, **kwargs)

+         queue = tr.route_for_task(

+             "odcs.server.celery_tasks.run_cleanup", *args, **kwargs

+         )

          assert queue == {"queue": conf.celery_cleanup_queue}

  

      @patch("odcs.server.celery_tasks.get_odcs_compose")

      def test_invalid_rule_property_exception(self, mock_get_compose):

          mock_compose = Mock()

  

-         compose_md = {

-             "source_type": 3

-         }

+         compose_md = {"source_type": 3}

  

          mock_conf = {

              "routing_rules": {

                  "odcs.server.celery_tasks.generate_pungi_compose": {

-                     "pungi_composes": {

-                         "bad_compose_prop": 3,

-                     },

+                     "pungi_composes": {"bad_compose_prop": 3},

                  },

              },

              "cleanup_task": "odcs.server.celery_tasks.run_cleanup",
@@ -210,8 +190,9 @@ 

          args = [[1], {}]

          kwargs = {}

          with pytest.raises(ValueError) as e:

-             tr.route_for_task("odcs.server.celery_tasks.generate_pungi_compose",

-                               *args, **kwargs)

+             tr.route_for_task(

+                 "odcs.server.celery_tasks.generate_pungi_compose", *args, **kwargs

+             )

              assert "invalid property" in e.args[0]

              assert "bad_compose_prop" in e.args[0]

  
@@ -227,10 +208,7 @@ 

          mock_conf = {

              "routing_rules": {

                  "odcs.server.celery_tasks.generate_pungi_compose": {

-                     "pungi_composes": {

-                         "source_type": 3,

-                         "source": "^fedora30#.*",

-                     },

+                     "pungi_composes": {"source_type": 3, "source": "^fedora30#.*"},

                  },

              },

              "cleanup_task": "odcs.server.celery_tasks.run_cleanup",
@@ -244,19 +222,30 @@ 

          mock_get_compose.return_value = mock_compose

          args = [[1], {}]

          kwargs = {}

-         queue = tr.route_for_task("odcs.server.celery_tasks.generate_pungi_compose",

-                                   *args, **kwargs)

+         queue = tr.route_for_task(

+             "odcs.server.celery_tasks.generate_pungi_compose", *args, **kwargs

+         )

          assert queue == {"queue": "pungi_composes"}

  

  

  class TestRescheduleWaitingComposes(ModelsBaseTest):

- 

-     def _add_test_compose(self, state, time_submitted=None,

-                           time_started=None,

-                           source_type=PungiSourceType.KOJI_TAG):

+     def _add_test_compose(

+         self,

+         state,

+         time_submitted=None,

+         time_started=None,

+         source_type=PungiSourceType.KOJI_TAG,

+     ):

          compose = Compose.create(

-             db.session, "unknown", source_type, "f26",

-             COMPOSE_RESULTS["repository"], 60, "", 0)

+             db.session,

+             "unknown",

+             source_type,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             "",

+             0,

+         )

          compose.state = state

          compose.celery_task_id = "1"

          if time_submitted:
@@ -269,40 +258,51 @@ 

  

      @patch("odcs.server.celery_tasks.get_current_celery_task_ids")

      @patch("odcs.server.celery_tasks.schedule_compose")

-     def test_reschedule_waiting_composes_generating_state(self, schedule_compose, task_ids):

+     def test_reschedule_waiting_composes_generating_state(

+         self, schedule_compose, task_ids

+     ):

          task_ids.return_value = set(["2"])

          time_submitted = datetime.utcnow() - timedelta(minutes=5)

          composes = []

          for i in range(10):

-             composes.append(self._add_test_compose(

-                 COMPOSE_STATES["wait"], time_submitted=time_submitted))

+             composes.append(

+                 self._add_test_compose(

+                     COMPOSE_STATES["wait"], time_submitted=time_submitted

+                 )

+             )

          composes = sorted(composes, key=lambda c: c.id)

          reschedule_waiting_composes()

-         schedule_compose.assert_has_calls([

-             call(composes[0]), call(composes[1]), call(composes[2]),

-             call(composes[3])])

+         schedule_compose.assert_has_calls(

+             [call(composes[0]), call(composes[1]), call(composes[2]), call(composes[3])]

+         )

  

      @patch("odcs.server.celery_tasks.get_current_celery_task_ids")

      @patch("odcs.server.celery_tasks.schedule_compose")

-     def test_reschedule_waiting_composes_generating_state_not_old_enough(self, schedule_compose, task_ids):

+     def test_reschedule_waiting_composes_generating_state_not_old_enough(

+         self, schedule_compose, task_ids

+     ):

          task_ids.return_value = set(["2"])

          composes = []

          for i in range(10):

-             composes.append(self._add_test_compose(

-                 COMPOSE_STATES["wait"]))

+             composes.append(self._add_test_compose(COMPOSE_STATES["wait"]))

          composes = sorted(composes, key=lambda c: c.id)

          reschedule_waiting_composes()

          schedule_compose.assert_not_called()

  

      @patch("odcs.server.celery_tasks.get_current_celery_task_ids")

      @patch("odcs.server.celery_tasks.schedule_compose")

-     def test_reschedule_waiting_composes_generating_state_old(self, schedule_compose, task_ids):

+     def test_reschedule_waiting_composes_generating_state_old(

+         self, schedule_compose, task_ids

+     ):

          task_ids.return_value = set(["2"])

          time_submitted = datetime.utcnow() - timedelta(days=5)

          composes = []

          for i in range(10):

-             composes.append(self._add_test_compose(

-                 COMPOSE_STATES["wait"], time_submitted=time_submitted))

+             composes.append(

+                 self._add_test_compose(

+                     COMPOSE_STATES["wait"], time_submitted=time_submitted

+                 )

+             )

          composes = sorted(composes, key=lambda c: c.id)

          reschedule_waiting_composes()

          schedule_compose.assert_not_called()

@@ -47,10 +47,10 @@ 

          super(TestComposerThread, self).setUp()

          self.composer = ComposerThread()

  

-         patched_pungi_conf_path = os.path.join(thisdir, '../conf/pungi.conf')

-         self.patch_pungi_conf_path = patch.object(odcs.server.conf,

-                                                   'pungi_conf_path',

-                                                   new=patched_pungi_conf_path)

+         patched_pungi_conf_path = os.path.join(thisdir, "../conf/pungi.conf")

+         self.patch_pungi_conf_path = patch.object(

+             odcs.server.conf, "pungi_conf_path", new=patched_pungi_conf_path

+         )

          self.patch_pungi_conf_path.start()

  

          self.patch_update_cache = patch("odcs.server.backend.KojiTagCache.update_cache")
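
A note on the start()/stop() pattern being reformatted here: patch() and patch.object() return a patcher whose replacement stays active from start() until stop() (called in tearDown() in these tests). A minimal self-contained sketch with an arbitrary standard-library target, not ODCS code:

    import os
    import unittest
    from unittest.mock import patch

    class PatcherLifecycleExample(unittest.TestCase):
        def setUp(self):
            # The patch applies at start() and is undone at stop();
            # addCleanup guarantees stop() runs even if a later test fails.
            self.patcher = patch("os.getcwd", return_value="/tmp/fake")
            self.patcher.start()
            self.addCleanup(self.patcher.stop)

        def test_patched(self):
            self.assertEqual(os.getcwd(), "/tmp/fake")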
@@ -71,30 +71,53 @@ 

              time.sleep(0.1)

          return c

  

-     def _add_module_compose(self, source="testmodule-master-20170515074419",

-                             flags=0):

+     def _add_module_compose(self, source="testmodule-master-20170515074419", flags=0):

          compose = Compose.create(

-             db.session, "unknown", PungiSourceType.MODULE, source,

-             COMPOSE_RESULTS["repository"], 60)

+             db.session,

+             "unknown",

+             PungiSourceType.MODULE,

+             source,

+             COMPOSE_RESULTS["repository"],

+             60,

+         )

          db.session.add(compose)

          db.session.commit()

  

      def _add_tag_compose(self, packages=None, flags=0):

          compose = Compose.create(

-             db.session, "unknown", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages, flags)

+             db.session,

+             "unknown",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages,

+             flags,

+         )

          db.session.add(compose)

          db.session.commit()

  

      def _add_repo_composes(self):

          old_c = Compose.create(

-             db.session, "me", PungiSourceType.REPO, os.path.join(thisdir, "repo"),

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.REPO,

+             os.path.join(thisdir, "repo"),

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          old_c.state = COMPOSE_STATES["done"]

          resolve_compose(old_c)

          c = Compose.create(

-             db.session, "me", PungiSourceType.REPO, os.path.join(thisdir, "repo"),

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.REPO,

+             os.path.join(thisdir, "repo"),

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          db.session.add(old_c)

          db.session.add(c)

          db.session.commit()
@@ -112,16 +135,21 @@ 

          self.composer.do_work()

          c = self._wait_for_compose_state(1, COMPOSE_STATES["done"])

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(

+                 odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"

+             ),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary"

+         )

          self.assertEqual(self.composer.currently_generating, [1])

  

      @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

-     def test_submit_build_module_without_release(

-             self, wrf, execute_cmd):

+     def test_submit_build_module_without_release(self, wrf, execute_cmd):

          self._add_module_compose("testmodule-master")

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])
@@ -129,9 +157,15 @@ 

          self.composer.do_work()

          c = self._wait_for_compose_state(1, COMPOSE_STATES["done"])

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(

+                 odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"

+             ),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary"

+         )

          self.assertEqual(c.source, "testmodule:master:20170515074419")

  

      @mock_mbs
@@ -147,16 +181,23 @@ 

          self.composer.do_work()

          c = self._wait_for_compose_state(1, COMPOSE_STATES["done"])

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(

+                 odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"

+             ),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary"

+         )

          self.assertEqual(self.composer.currently_generating, [1])

  

      @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

      def test_submit_build_module_without_release_colon_separator(

-             self, wrf, execute_cmd):

+         self, wrf, execute_cmd

+     ):

          self._add_module_compose("testmodule:master")

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])
@@ -164,16 +205,21 @@ 

          self.composer.do_work()

          c = self._wait_for_compose_state(1, COMPOSE_STATES["done"])

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(

+                 odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"

+             ),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary"

+         )

          self.assertEqual(c.source, "testmodule:master:20170515074419")

  

      @mock_mbs

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend._write_repo_file")

-     def test_submit_build_module_without_release_not_in_mbs(

-             self, wrf, execute_cmd):

+     def test_submit_build_module_without_release_not_in_mbs(self, wrf, execute_cmd):

  

          self._add_module_compose("testmodule2-master")

          c = db.session.query(Compose).filter(Compose.id == 1).one()
@@ -193,9 +239,15 @@ 

          c = self._wait_for_compose_state(2, COMPOSE_STATES["done"])

          self.assertEqual(c.reused_id, 1)

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(

+                 odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"

+             ),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary"

+         )

          mock_validate_pungi_compose.assert_called_once()

  

      @mock_mbs
@@ -211,9 +263,15 @@ 

          c = self._wait_for_compose_state(2, COMPOSE_STATES["done"])

          self.assertEqual(c.reused_id, 1)

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(

+                 odcs.server.conf.target_dir, "latest-odcs-1-1/compose/Temporary"

+             ),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/latest-odcs-1-1/compose/Temporary"

+         )

  

      @mock_mbs

      @patch("odcs.server.utils.execute_cmd")
@@ -230,9 +288,15 @@ 

          c = self._wait_for_compose_state(2, COMPOSE_STATES["done"])

          self.assertEqual(c.reused_id, None)

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "latest-odcs-2-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/latest-odcs-2-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(

+                 odcs.server.conf.target_dir, "latest-odcs-2-1/compose/Temporary"

+             ),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/latest-odcs-2-1/compose/Temporary"

+         )

  

      @patch("odcs.server.backend.create_koji_session")

      @patch("odcs.server.backend._write_repo_file")
@@ -275,15 +339,20 @@ 

          c = self._wait_for_compose_state(2, COMPOSE_STATES["done"])

          self.assertEqual(c.reused_id, 1)

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "odcs-1/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/odcs-1/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(odcs.server.conf.target_dir, "odcs-1/compose/Temporary"),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/odcs-1/compose/Temporary"

+         )

  

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.backend.create_koji_session")

      @patch("odcs.server.backend._write_repo_file")

      def test_submit_build_reuse_koji_tag_tags_changed(

-             self, wrf, create_koji_session, execute_cmd):

+         self, wrf, create_koji_session, execute_cmd

+     ):

          koji_session = MagicMock()

          create_koji_session.return_value = koji_session

          koji_session.getLastEvent.return_value = {"id": 123}
@@ -300,9 +369,13 @@ 

          c = self._wait_for_compose_state(2, COMPOSE_STATES["done"])

          self.assertEqual(c.reused_id, None)

          self.assertEqual(c.state, COMPOSE_STATES["done"])

-         self.assertEqual(c.result_repo_dir,

-                          os.path.join(odcs.server.conf.target_dir, "odcs-2/compose/Temporary"))

-         self.assertEqual(c.result_repo_url, "http://localhost/odcs/odcs-2/compose/Temporary")

+         self.assertEqual(

+             c.result_repo_dir,

+             os.path.join(odcs.server.conf.target_dir, "odcs-2/compose/Temporary"),

+         )

+         self.assertEqual(

+             c.result_repo_url, "http://localhost/odcs/odcs-2/compose/Temporary"

+         )

  

  

  class TestComposerThreadLostComposes(ModelsBaseTest):
@@ -314,7 +387,8 @@ 

          self.composer = ComposerThread()

  

          self.patch_generate_new_compose = patch(

-             "odcs.server.backend.ComposerThread.generate_new_compose")

+             "odcs.server.backend.ComposerThread.generate_new_compose"

+         )

          self.generate_new_compose = self.patch_generate_new_compose.start()

  

      def tearDown(self):
@@ -323,8 +397,15 @@ 

  

      def _add_test_compose(self, state):

          compose = Compose.create(

-             db.session, "unknown", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, "", 0)

+             db.session,

+             "unknown",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             "",

+             0,

+         )

          compose.state = state

          db.session.add(compose)

          db.session.commit()
@@ -366,19 +447,31 @@ 

          self.composer = ComposerThread()

  

          self.patch_generate_new_compose = patch(

-             "odcs.server.backend.ComposerThread.generate_new_compose")

+             "odcs.server.backend.ComposerThread.generate_new_compose"

+         )

          self.generate_new_compose = self.patch_generate_new_compose.start()

  

      def tearDown(self):

          super(TestComposerThreadStuckWaitComposes, self).tearDown()

          self.patch_generate_new_compose.stop()

  

-     def _add_test_compose(self, state, time_submitted=None,

-                           time_started=None,

-                           source_type=PungiSourceType.KOJI_TAG):

+     def _add_test_compose(

+         self,

+         state,

+         time_submitted=None,

+         time_started=None,

+         source_type=PungiSourceType.KOJI_TAG,

+     ):

          compose = Compose.create(

-             db.session, "unknown", source_type, "f26",

-             COMPOSE_RESULTS["repository"], 60, "", 0)

+             db.session,

+             "unknown",

+             source_type,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             "",

+             0,

+         )

          compose.state = state

          if time_submitted:

              compose.time_submitted = time_submitted
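
One reading aid for the stacked @patch decorators throughout this file: decorators are applied bottom-up, so the @patch closest to the def supplies the first mock argument after self. A self-contained sketch (patching arbitrary standard-library functions, not ODCS code):

    from unittest.mock import patch

    @patch("os.path.isdir")   # outermost: injected last
    @patch("os.path.isfile")  # innermost: injected first
    def check(isfile_mock, isdir_mock):
        # The argument order mirrors the bottom-up application order.
        return isfile_mock, isdir_mock

    check()  # the mocks are supplied automatically by the decorators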

file modified
+58 -44
@@ -45,8 +45,8 @@ 

      fedora_messaging = None

  

  

- @unittest.skipUnless(rhmsg, 'rhmsg is required to run this test case.')

- @unittest.skipIf(six.PY3, 'rhmsg has no Python 3 package so far.')

+ @unittest.skipUnless(rhmsg, "rhmsg is required to run this test case.")

+ @unittest.skipIf(six.PY3, "rhmsg has no Python 3 package so far.")

  class TestRHMsgSendMessageWhenComposeIsCreated(ModelsBaseTest):

      """Test send message when compose is created"""

  
@@ -56,53 +56,61 @@ 

          super(TestRHMsgSendMessageWhenComposeIsCreated, self).setUp()

  

          # Real lock is not required for running tests

-         self.mock_lock = patch('threading.Lock')

+         self.mock_lock = patch("threading.Lock")

          self.mock_lock.start()

  

      def tearDown(self):

          self.mock_lock.stop()

  

      def setup_composes(self):

-         self.compose = Compose.create(db.session,

-                                       "mine",

-                                       PungiSourceType.KOJI_TAG,

-                                       "f25",

-                                       COMPOSE_RESULTS["repository"],

-                                       3600)

+         self.compose = Compose.create(

+             db.session,

+             "mine",

+             PungiSourceType.KOJI_TAG,

+             "f25",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

-     @patch.object(conf, 'messaging_backend', new='rhmsg')

-     @patch('rhmsg.activemq.producer.AMQProducer')

-     @patch('proton.Message')

+     @patch.object(conf, "messaging_backend", new="rhmsg")

+     @patch("rhmsg.activemq.producer.AMQProducer")

+     @patch("proton.Message")

      def assert_messaging(self, compose, Message, AMQProducer):

          db.session.commit()

  

          self.assertEqual(

-             json.dumps({'event': 'state-changed', 'compose': compose.json()}),

-             Message.return_value.body)

+             json.dumps({"event": "state-changed", "compose": compose.json()}),

+             Message.return_value.body,

+         )

  

          producer_send = AMQProducer.return_value.__enter__.return_value.send

          producer_send.assert_called_once_with(Message.return_value)

  

      def test_send_message(self):

-         compose = Compose.create(db.session,

-                                  "me",

-                                  PungiSourceType.MODULE,

-                                  "testmodule-master",

-                                  COMPOSE_RESULTS["repository"],

-                                  3600)

+         compose = Compose.create(

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

+             "testmodule-master",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

  

          self.assert_messaging(compose)

  

      def test_message_on_state_change(self):

-         compose = db.session.query(Compose).filter(

-             Compose.id == self.compose.id).all()[0]

-         compose.state = COMPOSE_STATES['generating']

+         compose = (

+             db.session.query(Compose).filter(Compose.id == self.compose.id).all()[0]

+         )

+         compose.state = COMPOSE_STATES["generating"]

  

          self.assert_messaging(compose)

  

  

- @unittest.skipUnless(fedora_messaging, 'fedora_messaging is required to run this test case.')

+ @unittest.skipUnless(

+     fedora_messaging, "fedora_messaging is required to run this test case."

+ )

  class TestFedoraMessagingSendMessageWhenComposeIsCreated(ModelsBaseTest):

      """Test send message when compose is created"""

  
@@ -112,24 +120,26 @@ 

          super(TestFedoraMessagingSendMessageWhenComposeIsCreated, self).setUp()

  

          # Real lock is not required for running tests

-         self.mock_lock = patch('threading.Lock')

+         self.mock_lock = patch("threading.Lock")

          self.mock_lock.start()

  

      def tearDown(self):

          self.mock_lock.stop()

  

      def setup_composes(self):

-         self.compose = Compose.create(db.session,

-                                       "mine",

-                                       PungiSourceType.KOJI_TAG,

-                                       "f25",

-                                       COMPOSE_RESULTS["repository"],

-                                       3600)

+         self.compose = Compose.create(

+             db.session,

+             "mine",

+             PungiSourceType.KOJI_TAG,

+             "f25",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

-     @patch.object(conf, 'messaging_backend', new='fedora-messaging')

-     @patch('fedora_messaging.api.Message')

-     @patch('fedora_messaging.api.publish')

+     @patch.object(conf, "messaging_backend", new="fedora-messaging")

+     @patch("fedora_messaging.api.Message")

+     @patch("fedora_messaging.api.publish")

      def assert_messaging(self, compose, publish, Message):

          # The db.session.commit() call runs the on-commit handler, which produces

          # the fedora-messaging message.
@@ -137,23 +147,27 @@ 

  

          Message.assert_called_once_with(

              topic="odcs.compose.state-changed",

-             body={'event': 'state-changed', 'compose': compose.json()})

+             body={"event": "state-changed", "compose": compose.json()},

+         )

  

          publish.assert_called_once_with(Message.return_value)

  

      def test_send_message(self):

-         compose = Compose.create(db.session,

-                                  "me",

-                                  PungiSourceType.MODULE,

-                                  "testmodule-master",

-                                  COMPOSE_RESULTS["repository"],

-                                  3600)

+         compose = Compose.create(

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

+             "testmodule-master",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

  

          self.assert_messaging(compose)

  

      def test_message_on_state_change(self):

-         compose = db.session.query(Compose).filter(

-             Compose.id == self.compose.id).all()[0]

-         compose.state = COMPOSE_STATES['generating']

+         compose = (

+             db.session.query(Compose).filter(Compose.id == self.compose.id).all()[0]

+         )

+         compose.state = COMPOSE_STATES["generating"]

  

          self.assert_messaging(compose)
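
Most of the churn in this file is black's quote normalization: string literals are rewritten to double quotes unless that would force extra escaping. A sketch of the general rule (plain black behavior, not specific to this PR):

    # patch('threading.Lock')  becomes  patch("threading.Lock")
    s1 = "it's normalized to double quotes"       # no escaping needed
    s2 = 'but "quoted" text keeps single quotes'  # switching would add escapes
    print(s1, s2)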

file modified
+30 -11
@@ -33,25 +33,34 @@ 

  

  

  class TestComposesCollector(ModelsBaseTest):

- 

      def setUp(self):

          super(TestComposesCollector, self).setUp()

          self.collector = ComposesCollector()

  

      def test_composes_total(self):

          Compose.create(

-             db.session, "unknown", PungiSourceType.MODULE, "testmodule:master",

-             COMPOSE_RESULTS["repository"], 60)

+             db.session,

+             "unknown",

+             PungiSourceType.MODULE,

+             "testmodule:master",

+             COMPOSE_RESULTS["repository"],

+             60,

+         )

          Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+         )

          db.session.commit()

  

          r = self.collector.composes_total()

          for sample in r.samples:

              if (

-                 sample.labels["source_type"] in ["module", "tag"] and

-                 sample.labels["state"] == "wait"

+                 sample.labels["source_type"] in ["module", "tag"]

+                 and sample.labels["state"] == "wait"

              ):

                  self.assertEqual(sample.value, 1)

              else:
@@ -60,12 +69,22 @@ 

      def test_raw_config_composes_count(self):

          for i in range(15):

              Compose.create(

-                 db.session, "unknown", PungiSourceType.RAW_CONFIG, "foo#bar",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "unknown",

+                 PungiSourceType.RAW_CONFIG,

+                 "foo#bar",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

          for i in range(10):

              Compose.create(

-                 db.session, "me", PungiSourceType.RAW_CONFIG, "foo#hash%d" % i,

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "me",

+                 PungiSourceType.RAW_CONFIG,

+                 "foo#hash%d" % i,

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

          db.session.commit()

          r = self.collector.raw_config_composes_count()

          for sample in r.samples:
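
The test_composes_total hunk above also shows black's operator placement: a multi-line boolean condition is broken before the binary operator, not after it. The same layout in miniature:

    x, y = 3, 4
    if (
        x > 2          # black breaks the line *before* `and`,
        and y > 3      # matching the reformatted condition above
    ):
        print("both conditions hold")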

@@ -25,13 +25,17 @@ 

  from mock import patch, mock_open, MagicMock

  

  from odcs.server.mock_runroot import (

-     mock_runroot_init, raise_if_runroot_key_invalid, mock_runroot_run,

-     mock_runroot_install, rmtree_skip_mounts, cleanup_old_runroots)

+     mock_runroot_init,

+     raise_if_runroot_key_invalid,

+     mock_runroot_run,

+     mock_runroot_install,

+     rmtree_skip_mounts,

+     cleanup_old_runroots,

+ )

  from .utils import AnyStringWith

  

  

  class TestMockRunroot(unittest.TestCase):

- 

      def setUp(self):

          super(TestMockRunroot, self).setUp()

  
@@ -44,21 +48,26 @@ 

      @patch("odcs.server.mock_runroot.rmtree_skip_mounts")

      @patch("odcs.server.mock_runroot.cleanup_old_runroots")

      def test_mock_runroot_init(

-             self, cleanup_old_runroots, rmtree_skip_mounts, fake_print,

-             execute_mock, create_koji_session):

+         self,

+         cleanup_old_runroots,

+         rmtree_skip_mounts,

+         fake_print,

+         execute_mock,

+         create_koji_session,

+     ):

          execute_mock.side_effect = RuntimeError("Expected exception")

          koji_session = create_koji_session.return_value

          koji_session.getRepo.return_value = {"id": 1}

  

          m = mock_open()

-         with patch('odcs.server.mock_runroot.open', m, create=True):

+         with patch("odcs.server.mock_runroot.open", m, create=True):

              with self.assertRaises(RuntimeError):

                  mock_runroot_init("f30-build")

  

          fake_print.assert_called_once()

          m.return_value.write.assert_called_once_with(AnyStringWith("f30-build"))

  

-         execute_mock.assert_called_once_with(AnyStringWith("-"), ['--init'])

+         execute_mock.assert_called_once_with(AnyStringWith("-"), ["--init"])

          rmtree_skip_mounts.assert_called_once()

          cleanup_old_runroots.assert_called_once()
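
For readers unfamiliar with mock_open, the pattern above replaces the module-level open() so the test can assert on what was written; create=True lets patch install a name that may not already exist in the target namespace. A self-contained sketch against builtins.open rather than the ODCS module:

    from unittest.mock import mock_open, patch

    m = mock_open()
    with patch("builtins.open", m, create=True):
        with open("/fake/path", "w") as f:
            f.write("f30-build")

    # mock_open routes the handle's write() to m.return_value.write,
    # which is what the assertion in the test above inspects.
    m.return_value.write.assert_called_once_with("f30-build")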

  
@@ -76,16 +85,28 @@ 

      def test_mock_runroot_run(self, execute_cmd, execute_mock):

          mock_runroot_run("foo-bar", ["df", "-h"])

  

-         execute_mock.assert_called_once_with('foo-bar', [

-             '--old-chroot', '--chroot', '--', '/bin/sh', '-c', '{ df -h; }'], False)

-         execute_cmd.assert_any_call([

-             'mount', '-o', 'bind', AnyStringWith('test_composes'), AnyStringWith('test_composes')])

-         execute_cmd.assert_any_call(['umount', '-l', AnyStringWith('test_composes')])

+         execute_mock.assert_called_once_with(

+             "foo-bar",

+             ["--old-chroot", "--chroot", "--", "/bin/sh", "-c", "{ df -h; }"],

+             False,

+         )

+         execute_cmd.assert_any_call(

+             [

+                 "mount",

+                 "-o",

+                 "bind",

+                 AnyStringWith("test_composes"),

+                 AnyStringWith("test_composes"),

+             ]

+         )

+         execute_cmd.assert_any_call(["umount", "-l", AnyStringWith("test_composes")])

  

      @patch("odcs.server.mock_runroot.execute_mock")

      def test_mock_runroot_install(self, execute_mock):

          mock_runroot_install("foo-bar", ["lorax", "dracut"])

-         execute_mock.assert_called_once_with('foo-bar', ['--install', 'lorax', 'dracut'])

+         execute_mock.assert_called_once_with(

+             "foo-bar", ["--install", "lorax", "dracut"]

+         )

  

      @patch("odcs.server.mock_runroot.execute_mock")

      @patch("odcs.server.mock_runroot.rmtree_skip_mounts")
@@ -98,16 +119,28 @@ 

      @patch("odcs.server.mock_runroot.execute_mock")

      @patch("odcs.server.mock_runroot.execute_cmd")

      @patch("odcs.server.mock_runroot.rmtree_skip_mounts")

-     def test_mock_runroot_run_exception(self, rmtree_skip_mounts, execute_cmd, execute_mock):

+     def test_mock_runroot_run_exception(

+         self, rmtree_skip_mounts, execute_cmd, execute_mock

+     ):

          execute_mock.side_effect = RuntimeError("Expected exception")

          with self.assertRaises(RuntimeError):

              mock_runroot_run("foo-bar", ["df", "-h"])

  

-         execute_mock.assert_called_once_with('foo-bar', [

-             '--old-chroot', '--chroot', '--', '/bin/sh', '-c', '{ df -h; }'], False)

-         execute_cmd.assert_any_call([

-             'mount', '-o', 'bind', AnyStringWith('test_composes'), AnyStringWith('test_composes')])

-         execute_cmd.assert_any_call(['umount', '-l', AnyStringWith('test_composes')])

+         execute_mock.assert_called_once_with(

+             "foo-bar",

+             ["--old-chroot", "--chroot", "--", "/bin/sh", "-c", "{ df -h; }"],

+             False,

+         )

+         execute_cmd.assert_any_call(

+             [

+                 "mount",

+                 "-o",

+                 "bind",

+                 AnyStringWith("test_composes"),

+                 AnyStringWith("test_composes"),

+             ]

+         )

+         execute_cmd.assert_any_call(["umount", "-l", AnyStringWith("test_composes")])

          rmtree_skip_mounts.assert_called_once()

  

      @patch("odcs.server.mock_runroot.os.rmdir")
@@ -125,6 +158,7 @@ 

          - /mnt/odcs - non-empty mountpoint directory must not be removed.

          - /x - regular file which should be removed.

          """

+ 

          def mocked_listdir(path):

              # Creates fake directory structure within the /var/lib/mock/foo-bar/root:

              #  - /mnt/koji
@@ -137,6 +171,7 @@ 

              elif path.endswith("/odcs"):

                  return ["foo"]

              return []

+ 

          listdir.side_effect = mocked_listdir

  

          def mocked_isdir(mode):
@@ -144,6 +179,7 @@ 

              # - 0 means it is not a directory.

              # - 1 means it is a directory.

              return mode

+ 

          isdir.side_effect = mocked_isdir

  

          def mocked_lstat(path):
@@ -153,6 +189,7 @@ 

              else:

                  stat_result.st_mode = 1  # Fake value for directory.

              return stat_result

+ 

          lstat.side_effect = mocked_lstat

  

          rmtree_skip_mounts("/var/lib/mock/foo-bar/root", ["/mnt/odcs"])
@@ -182,8 +219,11 @@ 

              else:

                  raise ValueError("stat called for unexpected file.")

              return stat_result

+ 

          stat.side_effect = mocked_stat

  

          cleanup_old_runroots()

  

-         rmtree_skip_mounts.assert_called_once_with("/var/lib/mock/foo", AnyStringWith("test_composes"))

+         rmtree_skip_mounts.assert_called_once_with(

+             "/var/lib/mock/foo", AnyStringWith("test_composes")

+         )
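
The lone "+" blank lines added around the nested mocked_* helpers in this file are also black's doing: it requires an empty line after a nested function body before the following statement. In miniature:

    def outer():
        def inner(value):
            return value * 2

        # black inserts the blank line above, between the nested def
        # and this statement.
        return inner(21)

    print(outer())  # 42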

file modified
+127 -69
@@ -36,11 +36,15 @@ 

  

  

  class TestModels(ModelsBaseTest):

- 

      def test_creating_event_and_builds(self):

          compose = Compose.create(

-             db.session, "me", PungiSourceType.MODULE, "testmodule-master",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

+             "testmodule-master",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

          db.session.expire_all()

  
@@ -52,58 +56,73 @@ 

          self.assertEqual(c.results, COMPOSE_RESULTS["repository"])

          self.assertTrue(c.time_to_expire)

  

-         expected_json = {'source_type': 2, 'state': 0, 'time_done': None,

-                          'state_name': 'wait',

-                          'state_reason': None,

-                          'source': u'testmodule-master',

-                          'owner': u'me',

-                          'result_repo': 'http://localhost/odcs/odcs-1/compose/Temporary',

-                          'result_repofile': 'http://localhost/odcs/odcs-1/compose/Temporary/odcs-1.repo',

-                          'time_submitted': c.json()["time_submitted"], 'id': 1,

-                          'time_started': None,

-                          'time_removed': None,

-                          'removed_by': None,

-                          'time_to_expire': c.json()["time_to_expire"],

-                          'flags': [],

-                          'results': ['repository'],

-                          'sigkeys': '',

-                          'koji_event': None,

-                          'koji_task_id': None,

-                          'packages': None,

-                          'builds': None,

-                          'arches': 'x86_64',

-                          'multilib_arches': '',

-                          'multilib_method': 0,

-                          'lookaside_repos': None,

-                          'modular_koji_tags': None,

-                          'module_defaults_url': None,

-                          'label': None,

-                          'compose_type': None,

-                          'pungi_compose_id': None,

-                          'pungi_config_dump': 'test',

-                          'target_dir': 'default',

-                          'toplevel_url': 'http://localhost/odcs/odcs-1'}

+         expected_json = {

+             "source_type": 2,

+             "state": 0,

+             "time_done": None,

+             "state_name": "wait",

+             "state_reason": None,

+             "source": u"testmodule-master",

+             "owner": u"me",

+             "result_repo": "http://localhost/odcs/odcs-1/compose/Temporary",

+             "result_repofile": "http://localhost/odcs/odcs-1/compose/Temporary/odcs-1.repo",

+             "time_submitted": c.json()["time_submitted"],

+             "id": 1,

+             "time_started": None,

+             "time_removed": None,

+             "removed_by": None,

+             "time_to_expire": c.json()["time_to_expire"],

+             "flags": [],

+             "results": ["repository"],

+             "sigkeys": "",

+             "koji_event": None,

+             "koji_task_id": None,

+             "packages": None,

+             "builds": None,

+             "arches": "x86_64",

+             "multilib_arches": "",

+             "multilib_method": 0,

+             "lookaside_repos": None,

+             "modular_koji_tags": None,

+             "module_defaults_url": None,

+             "label": None,

+             "compose_type": None,

+             "pungi_compose_id": None,

+             "pungi_config_dump": "test",

+             "target_dir": "default",

+             "toplevel_url": "http://localhost/odcs/odcs-1",

+         }

          self.assertEqual(c.json(True), expected_json)

  

      def test_compose_paths(self):

          compose = Compose.create(

-             db.session, "me", PungiSourceType.MODULE, "testmodule-master",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

+             "testmodule-master",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          compose.id = 1

          self.assertEqual(compose.toplevel_dir, os.path.join(conf.target_dir, "odcs-1"))

          self.assertEqual(

              compose.result_repofile_path,

-             os.path.join(conf.target_dir, "odcs-1/compose/Temporary/odcs-1.repo")

+             os.path.join(conf.target_dir, "odcs-1/compose/Temporary/odcs-1.repo"),

          )

          self.assertEqual(

              compose.result_repo_dir,

-             os.path.join(conf.target_dir, "odcs-1/compose/Temporary")

+             os.path.join(conf.target_dir, "odcs-1/compose/Temporary"),

          )

  

      def test_target_dir_none(self):

          compose = Compose.create(

-             db.session, "me", PungiSourceType.MODULE, "testmodule-master",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

+             "testmodule-master",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          compose.target_dir = None

          db.session.commit()

          self.assertEqual(compose.target_dir, conf.target_dir)
@@ -114,8 +133,13 @@ 

          by Compose.create_copy() method.

          """

          compose = Compose.create(

-             db.session, "me", PungiSourceType.MODULE, "testmodule-master",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.MODULE,

+             "testmodule-master",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.commit()

  

          # Generate non-default data for every attribute in compose, so we can
@@ -129,8 +153,9 @@ 

              elif t == "DATETIME":

                  new_value = datetime.utcnow()

              else:

-                 raise ValueError("New column type %r added, please handle it "

-                                  "in this test" % t)

+                 raise ValueError(

+                     "New column type %r added, please handle it " "in this test" % t

+                 )

              setattr(compose, c.name, new_value)

  

          db.session.commit()
@@ -140,41 +165,51 @@ 

          for c in Compose.__table__.columns:

              # Following are list of fields which should not be copied

              # in create_copy() method.

-             if c.name in ["id", "state", "state_reason", "time_to_expire",

-                           "time_done", "time_submitted", "time_removed",

-                           "removed_by", "reused_id", "koji_task_id",

-                           "time_started", "pungi_compose_id", "celery_task_id"]:

+             if c.name in [

+                 "id",

+                 "state",

+                 "state_reason",

+                 "time_to_expire",

+                 "time_done",

+                 "time_submitted",

+                 "time_removed",

+                 "removed_by",

+                 "reused_id",

+                 "koji_task_id",

+                 "time_started",

+                 "pungi_compose_id",

+                 "celery_task_id",

+             ]:

                  assertMethod = self.assertNotEqual

              else:

                  assertMethod = self.assertEqual

              assertMethod(

-                 [c.name, getattr(compose, c.name)],

-                 [c.name, getattr(copy, c.name)])

+                 [c.name, getattr(compose, c.name)], [c.name, getattr(copy, c.name)]

+             )

  

  

  class TestUserModel(ModelsBaseTest):

- 

      def test_find_by_email(self):

-         db.session.add(User(username='tester1'))

-         db.session.add(User(username='admin'))

+         db.session.add(User(username="tester1"))

+         db.session.add(User(username="admin"))

          db.session.commit()

  

-         user = User.find_user_by_name('admin')

-         self.assertEqual('admin', user.username)

+         user = User.find_user_by_name("admin")

+         self.assertEqual("admin", user.username)

  

      def test_create_user(self):

-         User.create_user(username='tester2')

+         User.create_user(username="tester2")

          db.session.commit()

  

-         user = User.find_user_by_name('tester2')

-         self.assertEqual('tester2', user.username)

+         user = User.find_user_by_name("tester2")

+         self.assertEqual("tester2", user.username)

  

      def test_no_group_is_added_if_no_groups(self):

-         User.create_user(username='tester1')

+         User.create_user(username="tester1")

          db.session.commit()

  

-         user = User.find_user_by_name('tester1')

-         self.assertEqual('tester1', user.username)

+         user = User.find_user_by_name("tester1")

+         self.assertEqual("tester1", user.username)

  

  

  class ComposeModel(ModelsBaseTest):
@@ -184,17 +219,40 @@ 

          super(ComposeModel, self).setUp()

  

          self.c1 = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60)

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+         )

          self.c2 = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1')

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1",

+         )

          self.c3 = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1')

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1",

+         )

          self.c4 = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-             COMPOSE_RESULTS["repository"], 60, packages='pkg1')

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             COMPOSE_RESULTS["repository"],

+             60,

+             packages="pkg1",

+         )

  

          map(db.session.add, (self.c1, self.c2, self.c3, self.c4))

          db.session.commit()
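
Incidentally, the map(db.session.add, ...) in the context lines above relies on Python 2 semantics: on Python 3, map() returns a lazy iterator, so the add() calls never run unless Compose.create already attaches the objects to the session (this PR only reformats, so the line is untouched). A standalone illustration of the difference:

    # Lazy: on Python 3 the iterator is never consumed, so nothing happens.
    added = []
    map(added.append, (1, 2, 3))
    print(added)  # [] on Python 3

    # Eager: an explicit loop always runs the side effects.
    for item in (1, 2, 3):
        added.append(item)
    print(added)  # [1, 2, 3]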

file modified
+72 -50
@@ -32,10 +32,8 @@ 

  

  @patch("odcs.server.pulp.Pulp._rest_post")

  class TestPulp(ModelsBaseTest):

- 

      def test_pulp_request(self, pulp_rest_post):

-         c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

+         c = Compose.create(db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

          db.session.commit()

  

          pulp_rest_post.return_value = []
@@ -43,18 +41,20 @@ 

          pulp = Pulp("http://localhost/", "user", "pass", c)

          pulp.get_repos_from_content_sets(["foo-1", "foo-2"])

          pulp_rest_post.assert_called_once_with(

-             'repositories/search/',

-             {'criteria': {

-                 'fields': ['notes'],

-                 'filters': {

-                     'notes.include_in_download_service': 'True',

-                     'notes.content_set': {'$in': ['foo-1', 'foo-2']}

+             "repositories/search/",

+             {

+                 "criteria": {

+                     "fields": ["notes"],

+                     "filters": {

+                         "notes.include_in_download_service": "True",

+                         "notes.content_set": {"$in": ["foo-1", "foo-2"]},

+                     },

                  }

-             }})

+             },

+         )

  

      def test_pulp_request_include_inpublished(self, pulp_rest_post):

-         c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

+         c = Compose.create(db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

          db.session.commit()

  

          pulp_rest_post.return_value = []
@@ -62,21 +62,21 @@ 

          pulp = Pulp("http://localhost/", "user", "pass", c)

          pulp.get_repos_from_content_sets(["foo-1", "foo-2"], True)

          pulp_rest_post.assert_called_once_with(

-             'repositories/search/',

-             {'criteria': {

-                 'fields': ['notes'],

-                 'filters': {

-                     'notes.content_set': {'$in': ['foo-1', 'foo-2']}

+             "repositories/search/",

+             {

+                 "criteria": {

+                     "fields": ["notes"],

+                     "filters": {"notes.content_set": {"$in": ["foo-1", "foo-2"]}},

                  }

-             }})

+             },

+         )

  

      def test_generate_pulp_compose_arch_merge(self, pulp_rest_post):

          """

          Tests that multiple repos in single content_set are merged into

          single one by replacing arch with $basearch variable if possible.

          """

-         c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

+         c = Compose.create(db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

          db.session.commit()

  

          pulp_rest_post.return_value = [
@@ -106,7 +106,7 @@ 

                      "signatures": "SIG1,SIG3",

                      "product_versions": "",

                  }

-             }

+             },

          ]

  

          pulp = Pulp("http://localhost/", "user", "pass", c)
@@ -125,18 +125,18 @@ 

                      "arches": set(["ppc64"]),

                      "sigkeys": ["SIG1", "SIG3"],

                      "product_versions": "",

-                 }

-             })

+                 },

+             },

+         )

  

      @patch("odcs.server.mergerepo.execute_cmd")

      @patch("odcs.server.mergerepo.makedirs")

      @patch("odcs.server.mergerepo.Lock")

      @patch("odcs.server.mergerepo.MergeRepo._download_repodata")

      def test_pulp_compose_merge_repos(

-             self, download_repodata, lock, makedirs, execute_cmd,

-             pulp_rest_post):

-         c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

+         self, download_repodata, lock, makedirs, execute_cmd, pulp_rest_post

+     ):

+         c = Compose.create(db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

          db.session.commit()

  

          pulp_rest_post.return_value = [
@@ -190,45 +190,66 @@ 

                      "arches": set(["x86_64", "ppc64le"]),

                      "sigkeys": ["SIG1", "SIG2"],

                  }

-             })

+             },

+         )

  

          makedirs.assert_any_call(c.result_repo_dir + "/foo-1/x86_64")

          makedirs.assert_any_call(c.result_repo_dir + "/foo-1/ppc64le")

  

          repo_prefix = "%s/pulp_repo_cache/content/" % conf.target_dir

          execute_cmd.assert_any_call(

-             ['/usr/bin/mergerepo_c', '--method', 'nvr', '-o',

-              c.result_repo_dir + '/foo-1/x86_64',

-              '--repo-prefix-search', '%s/pulp_repo_cache' % conf.target_dir,

-              '--repo-prefix-replace', 'http://localhost/',

-              '-r', repo_prefix + "1.0/x86_64/os",

-              '-r', repo_prefix + "1.1/x86_64/os"], timeout=1800)

+             [

+                 "/usr/bin/mergerepo_c",

+                 "--method",

+                 "nvr",

+                 "-o",

+                 c.result_repo_dir + "/foo-1/x86_64",

+                 "--repo-prefix-search",

+                 "%s/pulp_repo_cache" % conf.target_dir,

+                 "--repo-prefix-replace",

+                 "http://localhost/",

+                 "-r",

+                 repo_prefix + "1.0/x86_64/os",

+                 "-r",

+                 repo_prefix + "1.1/x86_64/os",

+             ],

+             timeout=1800,

+         )

          execute_cmd.assert_any_call(

-             ['/usr/bin/mergerepo_c', '--method', 'nvr', '-o',

-              c.result_repo_dir + '/foo-1/ppc64le',

-              '--repo-prefix-search', '%s/pulp_repo_cache' % conf.target_dir,

-              '--repo-prefix-replace', 'http://localhost/',

-              '-r', repo_prefix + "1.0/ppc64le/os"], timeout=1800)

+             [

+                 "/usr/bin/mergerepo_c",

+                 "--method",

+                 "nvr",

+                 "-o",

+                 c.result_repo_dir + "/foo-1/ppc64le",

+                 "--repo-prefix-search",

+                 "%s/pulp_repo_cache" % conf.target_dir,

+                 "--repo-prefix-replace",

+                 "http://localhost/",

+                 "-r",

+                 repo_prefix + "1.0/ppc64le/os",

+             ],

+             timeout=1800,

+         )

  

          download_repodata.assert_any_call(

-             repo_prefix + "1.0/x86_64/os",

-             "http://localhost/content/1.0/x86_64/os")

+             repo_prefix + "1.0/x86_64/os", "http://localhost/content/1.0/x86_64/os"

+         )

          download_repodata.assert_any_call(

-             repo_prefix + "1.1/x86_64/os",

-             "http://localhost/content/1.1/x86_64/os")

+             repo_prefix + "1.1/x86_64/os", "http://localhost/content/1.1/x86_64/os"

+         )

          download_repodata.assert_any_call(

-             repo_prefix + "1.0/ppc64le/os",

-             "http://localhost/content/1.0/ppc64le/os")

+             repo_prefix + "1.0/ppc64le/os", "http://localhost/content/1.0/ppc64le/os"

+         )
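
These assertions lean on the difference between assert_any_call, which passes if any one of the recorded calls matches, and assert_called_once_with, which requires exactly one call in total. A self-contained illustration:

    from unittest.mock import MagicMock

    execute_cmd = MagicMock()
    execute_cmd(["mount", "-o", "bind", "/src", "/dst"])
    execute_cmd(["umount", "-l", "/dst"])

    # Passes: one of the two recorded calls matches.
    execute_cmd.assert_any_call(["umount", "-l", "/dst"])

    # Would fail: the mock was called twice, not once.
    # execute_cmd.assert_called_once_with(["umount", "-l", "/dst"])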

  

      @patch("odcs.server.mergerepo.execute_cmd")

      @patch("odcs.server.mergerepo.makedirs")

      @patch("odcs.server.mergerepo.Lock")

      @patch("odcs.server.mergerepo.MergeRepo._download_repodata")

      def test_pulp_compose_find_latest_version(

-             self, download_repodata, lock, makedirs, execute_cmd,

-             pulp_rest_post):

-         c = Compose.create(

-             db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

+         self, download_repodata, lock, makedirs, execute_cmd, pulp_rest_post

+     ):

+         c = Compose.create(db.session, "me", PungiSourceType.PULP, "foo-1", 0, 3600)

          db.session.commit()

  

          pulp_rest_post.return_value = [
@@ -264,7 +285,8 @@ 

                      "sigkeys": ["SIG1", "SIG2"],

                      "product_versions": '["1.1"]',

                  }

-             })

+             },

+         )

  

          makedirs.assert_not_called()

  

file modified
+311 -203
@@ -31,7 +31,12 @@ 

  from kobo.conf import PyConfigParser

  

  from odcs.server.pungi import (

-     Pungi, PungiConfig, PungiSourceType, PungiLogs, RawPungiConfig)

+     Pungi,

+     PungiConfig,

+     PungiSourceType,

+     PungiLogs,

+     RawPungiConfig,

+ )

  from odcs.server import conf, db

  from odcs.server.models import Compose

  from odcs.common.types import COMPOSE_STATES, COMPOSE_RESULTS, COMPOSE_FLAGS
@@ -42,7 +47,6 @@ 

  

  

  class TestPungiConfig(unittest.TestCase):

- 

      def setUp(self):

          super(TestPungiConfig, self).setUp()

  
@@ -79,9 +83,15 @@ 

          )

  

      def test_pungi_config_tag(self):

-         pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG,

-                                 "f26", packages=["file"], sigkeys="123 456",

-                                 arches=["ppc64", "s390"])

+         pungi_cfg = PungiConfig(

+             "MBS-512",

+             "1",

+             PungiSourceType.KOJI_TAG,

+             "f26",

+             packages=["file"],

+             sigkeys="123 456",

+             arches=["ppc64", "s390"],

+         )

          cfg = pungi_cfg.get_pungi_config()

          variants = pungi_cfg.get_variants_config()

          comps = pungi_cfg.get_comps_config()
@@ -90,17 +100,17 @@ 

          self.assertTrue(variants.find("ppc64") != -1)

          self.assertTrue(variants.find("s390") != -1)

          self.assertTrue(comps.find("file</packagereq>") != -1)

-         self.assertTrue(cfg.find("sigkeys = [\"123\", \"456\"]"))

+         self.assertTrue(cfg.find('sigkeys = ["123", "456"]'))

  

      def test_get_pungi_conf(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.MODULE,

-                                     "testmodule:master:1:1")

+             pungi_cfg = PungiConfig(

+                 "MBS-512", "1", PungiSourceType.MODULE, "testmodule:master:1:1"

+             )

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

              self.assertEqual(cfg["release_name"], "MBS-512")
@@ -111,12 +121,13 @@ 

  

      @patch("odcs.server.pungi.log")

      def test_get_pungi_conf_exception(self, log):

-         pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.MODULE,

-                                 "testmodule:master:1:1")

-         _, mock_path = tempfile.mkstemp(suffix='-pungi.conf')

-         with open(mock_path, 'w') as f:

+         pungi_cfg = PungiConfig(

+             "MBS-512", "1", PungiSourceType.MODULE, "testmodule:master:1:1"

+         )

+         _, mock_path = tempfile.mkstemp(suffix="-pungi.conf")

+         with open(mock_path, "w") as f:

              # write an invalid jinja2 template file

-             f.write('{{\n')

+             f.write("{{\n")

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

              pungi_cfg.get_pungi_config()

              log.exception.assert_called_once()
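
This test writes "{{\n" as a deliberately broken template and expects get_pungi_config() to swallow and log the failure; the in-code comment says the fixture is an invalid jinja2 template, so the error being logged is presumably a jinja2 syntax error. A standalone sketch of the failure mode it provokes, assuming jinja2:

    from jinja2 import Environment, TemplateSyntaxError

    try:
        # "{{" opens an expression that is never closed, so compiling
        # the template raises before anything is rendered.
        Environment().from_string("{{\n").render()
    except TemplateSyntaxError as err:
        print("template error:", err)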
@@ -124,41 +135,45 @@ 

  

      def test_get_pungi_conf_iso(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.MODULE,

-                                     "testmodule:master:1:1",

-                                     results=COMPOSE_RESULTS["iso"])

+             pungi_cfg = PungiConfig(

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.MODULE,

+                 "testmodule:master:1:1",

+                 results=COMPOSE_RESULTS["iso"],

+             )

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

              self.assertTrue("createiso" not in cfg["skip_phases"])

  

      def test_get_pungi_conf_boot_iso(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.MODULE,

-                                     "testmodule:master:1:1",

-                                     results=COMPOSE_RESULTS["boot.iso"])

+             pungi_cfg = PungiConfig(

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.MODULE,

+                 "testmodule:master:1:1",

+                 results=COMPOSE_RESULTS["boot.iso"],

+             )

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

              self.assertTrue("buildinstall" not in cfg["skip_phases"])

  

      def test_get_pungi_conf_koji_inherit(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG,

-                                     "f26")

+             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG, "f26")

  

              pungi_cfg.pkgset_koji_inherit = False

              template = pungi_cfg.get_pungi_config()
@@ -172,82 +187,103 @@ 

  

      def test_get_pungi_conf_check_deps(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG,

-                                     "f26")

+             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG, "f26")

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

              self.assertIs(cfg["check_deps"], False)

  

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG,

-                                     "f26", flags=COMPOSE_FLAGS["check_deps"])

+             pungi_cfg = PungiConfig(

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.KOJI_TAG,

+                 "f26",

+                 flags=COMPOSE_FLAGS["check_deps"],

+             )

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

              self.assertIs(cfg["check_deps"], True)

  

      def test_get_pungi_conf_multilib(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG,

-                                     "f26", multilib_arches=["x86_64", "s390x"],

-                                     multilib_method=3)

+             pungi_cfg = PungiConfig(

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.KOJI_TAG,

+                 "f26",

+                 multilib_arches=["x86_64", "s390x"],

+                 multilib_method=3,

+             )

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

-             self.assertEqual(set(cfg["multilib"][0][1].keys()), set(["s390x", "x86_64"]))

+             self.assertEqual(

+                 set(cfg["multilib"][0][1].keys()), set(["s390x", "x86_64"])

+             )

              for variant, arch_method_dict in cfg["multilib"]:

                  for method in arch_method_dict.values():

-                     self.assertEqual(set(method), set(['runtime', 'devel']))

+                     self.assertEqual(set(method), set(["runtime", "devel"]))

  

      def test_get_pungi_conf_pkgset_koji_builds(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG,

-                                     "f26", builds=["foo-1-1", "bar-1-1"])

+             pungi_cfg = PungiConfig(

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.KOJI_TAG,

+                 "f26",

+                 builds=["foo-1-1", "bar-1-1"],

+             )

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

-             self.assertEqual(set(cfg["pkgset_koji_builds"]),

-                              set(["foo-1-1", "bar-1-1"]))

-             self.assertEqual(cfg["additional_packages"],

-                              [(u'^Temporary$', {u'*': [u'*']})])

+             self.assertEqual(

+                 set(cfg["pkgset_koji_builds"]), set(["foo-1-1", "bar-1-1"])

+             )

+             self.assertEqual(

+                 cfg["additional_packages"], [(u"^Temporary$", {u"*": [u"*"]})]

+             )

  

      def test_get_pungi_conf_modular_koji_tags(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

              pungi_cfg = PungiConfig(

-                 "MBS-512", "1", PungiSourceType.KOJI_TAG, "f26",

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.KOJI_TAG,

+                 "f26",

                  modular_koji_tags="f26-modules",

                  module_defaults_url="git://localhost.tld/x.git master",

-                 packages=["foo"])

+                 packages=["foo"],

+             )

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

-             self.assertEqual(set(cfg["pkgset_koji_module_tag"]),

-                              set(["f26-modules"]))

+             self.assertEqual(set(cfg["pkgset_koji_module_tag"]), set(["f26-modules"]))

              self.assertEqual(cfg["gather_method"], "hybrid")

-             self.assertEqual(cfg["module_defaults_dir"], {

-                 'branch': 'master',

-                 'dir': '.',

-                 'repo': 'git://localhost.tld/x.git',

-                 'scm': 'git'})

+             self.assertEqual(

+                 cfg["module_defaults_dir"],

+                 {

+                     "branch": "master",

+                     "dir": ".",

+                     "repo": "git://localhost.tld/x.git",

+                     "scm": "git",

+                 },

+             )

  

              # The "<modules>" must appear in the variants.xml after the "<groups>".

              variants = pungi_cfg.get_variants_config()
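
The other mechanical change running through these hunks is string normalization: black rewrites literals to double quotes, leaving the u prefixes in place as seen above, except where switching would force extra escaping. A quick illustration:

    s1 = 'runtime'        # black rewrites this to: s1 = "runtime"
    s2 = u'^Temporary$'   # becomes u"^Temporary$" -- only the quotes change
    s3 = 'say "hi"'       # left as-is: double quotes would need escaping
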
@@ -257,81 +293,90 @@ 

  

      def test_get_pungi_conf_source_type_build(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.BUILD,

-                                     "x", builds=["foo-1-1", "bar-1-1"])

+             pungi_cfg = PungiConfig(

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.BUILD,

+                 "x",

+                 builds=["foo-1-1", "bar-1-1"],

+             )

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

-             self.assertEqual(cfg["pkgset_koji_tag"], '')

-             self.assertEqual(set(cfg["pkgset_koji_builds"]),

-                              set(["foo-1-1", "bar-1-1"]))

-             self.assertEqual(cfg["additional_packages"],

-                              [(u'^Temporary$', {u'*': [u'*']})])

+             self.assertEqual(cfg["pkgset_koji_tag"], "")

+             self.assertEqual(

+                 set(cfg["pkgset_koji_builds"]), set(["foo-1-1", "bar-1-1"])

+             )

+             self.assertEqual(

+                 cfg["additional_packages"], [(u"^Temporary$", {u"*": [u"*"]})]

+             )

  

      def test_get_pungi_conf_source_type_koji_tag_all_packages(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

-             pungi_cfg = PungiConfig(

-                 "MBS-512", "1", PungiSourceType.KOJI_TAG, "f26")

+             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.KOJI_TAG, "f26")

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

-             self.assertEqual(cfg["pkgset_koji_tag"], 'f26')

-             self.assertEqual(cfg["additional_packages"],

-                              [('^Temporary$', {'*': ['*']})])

+             self.assertEqual(cfg["pkgset_koji_tag"], "f26")

+             self.assertEqual(

+                 cfg["additional_packages"], [("^Temporary$", {"*": ["*"]})]

+             )

  

      def test_get_pungi_conf_source_type_koji_tag_some_packages(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

              pungi_cfg = PungiConfig(

-                 "MBS-512", "1", PungiSourceType.KOJI_TAG, "f26",

-                 packages=["file"])

+                 "MBS-512", "1", PungiSourceType.KOJI_TAG, "f26", packages=["file"]

+             )

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

-             self.assertEqual(cfg["pkgset_koji_tag"], 'f26')

+             self.assertEqual(cfg["pkgset_koji_tag"], "f26")

              self.assertTrue("additional_packages" not in cfg)

  

      def test_get_pungi_conf_lookaside_repos(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

              pungi_cfg = PungiConfig(

-                 "MBS-512", "1", PungiSourceType.KOJI_TAG, "f26",

-                 lookaside_repos="foo bar")

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.KOJI_TAG,

+                 "f26",

+                 lookaside_repos="foo bar",

+             )

  

              template = pungi_cfg.get_pungi_config()

              cfg = self._load_pungi_cfg(template)

              self.assertEqual(

-                 cfg["gather_lookaside_repos"],

-                 [(u'^.*$', {u'*': [u'foo', u'bar']})])

+                 cfg["gather_lookaside_repos"], [(u"^.*$", {u"*": [u"foo", u"bar"]})]

+             )

  

      def test_get_pungi_conf_include_devel_modules(self):

          _, mock_path = tempfile.mkstemp()

-         template_path = os.path.abspath(os.path.join(test_dir,

-                                                      "../conf/pungi.conf"))

+         template_path = os.path.abspath(os.path.join(test_dir, "../conf/pungi.conf"))

          shutil.copy2(template_path, mock_path)

  

          with patch("odcs.server.pungi.conf.pungi_conf_path", mock_path):

              pungi_cfg = PungiConfig(

-                 "MBS-512", "1", PungiSourceType.MODULE,

-                 "foo:1:1:1 foo-devel:1:1:1 bar-devel:1:1:1")

+                 "MBS-512",

+                 "1",

+                 PungiSourceType.MODULE,

+                 "foo:1:1:1 foo-devel:1:1:1 bar-devel:1:1:1",

+             )

  

              self.assertEqual(

                  pungi_cfg.source, "foo:1:1:1 foo-devel:1:1:1 bar-devel:1:1:1"
@@ -339,17 +384,15 @@ 

  

  

  class FakePyConfigParser(dict):

- 

      def load_from_file(self, *args, **kwargs):

          pass

  

  

  class TestPungi(ModelsBaseTest):

- 

      def setUp(self):

          super(TestPungi, self).setUp()

  

-         def mocked_clone_repo(url, dest, branch='master', commit=None):

+         def mocked_clone_repo(url, dest, branch="master", commit=None):

              makedirs(dest)

              makedirs(os.path.join(dest, "another"))

              with open(os.path.join(dest, "pungi.conf"), "w") as fd:
@@ -396,40 +439,47 @@ 

  

      @patch("odcs.server.utils.execute_cmd")

      def test_pungi_run(self, execute_cmd):

-         pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.MODULE,

-                                 "testmodule:master:1:1")

+         pungi_cfg = PungiConfig(

+             "MBS-512", "1", PungiSourceType.MODULE, "testmodule:master:1:1"

+         )

          pungi = Pungi(1, pungi_cfg)

          pungi.run(self.compose)

  

-         self.makedirs.assert_called_with(

-             AnyStringWith("test_composes/odcs-1/"))

-         self.makedirs.assert_called_with(

-             AnyStringWith("work/global"))

+         self.makedirs.assert_called_with(AnyStringWith("test_composes/odcs-1/"))

+         self.makedirs.assert_called_with(AnyStringWith("work/global"))

          self.ci_dump.assert_called_once_with(

-             AnyStringWith("work/global/composeinfo-base.json"))

+             AnyStringWith("work/global/composeinfo-base.json")

+         )

  

          execute_cmd.assert_called_once_with(

-             ['pungi-koji', AnyStringWith('pungi.conf'),

-              "--no-latest-link",

-              AnyStringWith('--compose-dir='), '--test'],

-             cwd=AnyStringWith('/tmp/'), timeout=3600,

+             [

+                 "pungi-koji",

+                 AnyStringWith("pungi.conf"),

+                 "--no-latest-link",

+                 AnyStringWith("--compose-dir="),

+                 "--test",

+             ],

+             cwd=AnyStringWith("/tmp/"),

+             timeout=3600,

              stderr=AnyStringWith("pungi-stderr.log"),

-             stdout=AnyStringWith("pungi-stdout.log"))

+             stdout=AnyStringWith("pungi-stdout.log"),

+         )

  

      @patch("odcs.server.utils.execute_cmd")

      @patch("odcs.server.pungi.PyConfigParser")

      def test_pungi_run_cts(self, py_config_parser, execute_cmd):

          self.patch_ci_dump.stop()

-         py_config_parser.return_value = FakePyConfigParser({

-             "cts_url": "https://cts.localhost.tld/",

-             "cts_keytab": "/tmp/some.keytab",

-         })

+         py_config_parser.return_value = FakePyConfigParser(

+             {"cts_url": "https://cts.localhost.tld/", "cts_keytab": "/tmp/some.keytab"}

+         )

  

          def fake_execute_cmd(*args, **kwargs):

              # Fake `execute_cmd` method which creates composeinfo-base.json file

              # and waits for three seconds to test that ODCS picks up the compose

              # ID from this file.

-             p = os.path.join(self.compose.toplevel_dir, "work", "global", "composeinfo-base.json")

+             p = os.path.join(

+                 self.compose.toplevel_dir, "work", "global", "composeinfo-base.json"

+             )

              makedirs(os.path.dirname(p))

              ci = ComposeInfo()

              ci.compose.id = "Fedora-Rawhide-20200517.n.1"
@@ -447,21 +497,27 @@ 

  

          execute_cmd.side_effect = fake_execute_cmd

  

-         pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.MODULE,

-                                 "testmodule:master:1:1")

+         pungi_cfg = PungiConfig(

+             "MBS-512", "1", PungiSourceType.MODULE, "testmodule:master:1:1"

+         )

          pungi = Pungi(1, pungi_cfg)

          pungi.run(self.compose)

  

-         self.makedirs.assert_called_with(

-             AnyStringWith("test_composes/odcs-1"))

+         self.makedirs.assert_called_with(AnyStringWith("test_composes/odcs-1"))

  

          execute_cmd.assert_called_once_with(

-             ['pungi-koji', AnyStringWith('pungi.conf'),

-              "--no-latest-link",

-              AnyStringWith('--compose-dir='), '--test'],

-             cwd=AnyStringWith('/tmp/'), timeout=3600,

+             [

+                 "pungi-koji",

+                 AnyStringWith("pungi.conf"),

+                 "--no-latest-link",

+                 AnyStringWith("--compose-dir="),

+                 "--test",

+             ],

+             cwd=AnyStringWith("/tmp/"),

+             timeout=3600,

              stderr=AnyStringWith("pungi-stderr.log"),

-             stdout=AnyStringWith("pungi-stdout.log"))

+             stdout=AnyStringWith("pungi-stdout.log"),

+         )

  

          self.assertEqual(self.compose.pungi_compose_id, "Fedora-Rawhide-20200517.n.1")
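
The fake_execute_cmd trick above relies on Mock.side_effect: when side_effect is a callable, the mock invokes it with the original arguments instead of merely recording the call, which is what lets the test materialize composeinfo-base.json at the moment pungi-koji would have written it. A self-contained sketch of the mechanism:

    from unittest import mock

    def fake(*args, **kwargs):
        # Stand-in work; the real test writes composeinfo-base.json here.
        print("pretending to run:", args[0][0])

    execute_cmd = mock.Mock(side_effect=fake)
    execute_cmd(["pungi-koji", "pungi.conf"], timeout=3600)  # runs fake()
    execute_cmd.assert_called_once()
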

  
@@ -473,26 +529,31 @@ 

              execute_cmd.reset_mock()

  

              self.compose.compose_type = compose_type

-             pungi_cfg = PungiConfig("MBS-512", "1", PungiSourceType.MODULE,

-                                     "testmodule:master:1:1")

+             pungi_cfg = PungiConfig(

+                 "MBS-512", "1", PungiSourceType.MODULE, "testmodule:master:1:1"

+             )

              pungi = Pungi(1, pungi_cfg)

              pungi.run(self.compose)

  

-             self.makedirs.assert_called_with(

-                 AnyStringWith("test_composes/odcs-1/"))

-             self.makedirs.assert_called_with(

-                 AnyStringWith("work/global"))

+             self.makedirs.assert_called_with(AnyStringWith("test_composes/odcs-1/"))

+             self.makedirs.assert_called_with(AnyStringWith("work/global"))

              self.ci_dump.assert_called_once_with(

-                 AnyStringWith("work/global/composeinfo-base.json"))

+                 AnyStringWith("work/global/composeinfo-base.json")

+             )

  

              execute_cmd.assert_called_once_with(

-                 ['pungi-koji', AnyStringWith('pungi.conf'),

-                  "--no-latest-link",

-                  AnyStringWith('--compose-dir='),

-                  '--%s' % (compose_type or "test")],

-                 cwd=AnyStringWith('/tmp/'), timeout=3600,

+                 [

+                     "pungi-koji",

+                     AnyStringWith("pungi.conf"),

+                     "--no-latest-link",

+                     AnyStringWith("--compose-dir="),

+                     "--%s" % (compose_type or "test"),

+                 ],

+                 cwd=AnyStringWith("/tmp/"),

+                 timeout=3600,

                  stderr=AnyStringWith("pungi-stderr.log"),

-                 stdout=AnyStringWith("pungi-stdout.log"))

+                 stdout=AnyStringWith("pungi-stdout.log"),

+             )

  

      @patch("odcs.server.utils.execute_cmd")

      def test_pungi_run_raw_config(self, execute_cmd):
@@ -501,38 +562,46 @@ 

              with open(os.path.join(topdir, "pungi.conf"), "r") as f:

                  data = f.read()

                  self.assertTrue("fake pungi conf 1" in data)

+ 

          execute_cmd.side_effect = mocked_execute_cmd

  

          fake_raw_config_urls = {

-             'pungi.conf': {

+             "pungi.conf": {

                  "url": "http://localhost/test.git",

                  "config_filename": "pungi.conf",

              }

          }

-         with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

-             pungi = Pungi(1, RawPungiConfig('pungi.conf#hash'))

+         with patch.object(conf, "raw_config_urls", new=fake_raw_config_urls):

+             pungi = Pungi(1, RawPungiConfig("pungi.conf#hash"))

              pungi.run(self.compose)

  

-         self.makedirs.assert_called_with(

-             AnyStringWith("test_composes/odcs-1/"))

-         self.makedirs.assert_called_with(

-             AnyStringWith("work/global"))

+         self.makedirs.assert_called_with(AnyStringWith("test_composes/odcs-1/"))

+         self.makedirs.assert_called_with(AnyStringWith("work/global"))

          self.ci_dump.assert_called_once_with(

-             AnyStringWith("work/global/composeinfo-base.json"))

+             AnyStringWith("work/global/composeinfo-base.json")

+         )

  

          execute_cmd.assert_called_once()

          self.clone_repo.assert_called_once_with(

-             'http://localhost/test.git', AnyStringWith("/raw_config_repo"),

-             commit='hash')

+             "http://localhost/test.git",

+             AnyStringWith("/raw_config_repo"),

+             commit="hash",

+         )

          compose_date = time.strftime("%Y%m%d", time.localtime())

-         self.assertEqual(self.compose.pungi_compose_id,

-                          "compose-1-10-%s.t.0" % compose_date)

+         self.assertEqual(

+             self.compose.pungi_compose_id, "compose-1-10-%s.t.0" % compose_date

+         )

  

      @patch("odcs.server.utils.execute_cmd")

      def test_pungi_run_raw_config_respin(self, execute_cmd):

          compose = Compose.create(

-             db.session, "me", PungiSourceType.RAW_CONFIG, "foo",

-             COMPOSE_RESULTS["repository"], 3600)

+             db.session,

+             "me",

+             PungiSourceType.RAW_CONFIG,

+             "foo",

+             COMPOSE_RESULTS["repository"],

+             3600,

+         )

          db.session.add(compose)

          db.session.commit()

  
@@ -541,22 +610,22 @@ 

              with open(os.path.join(topdir, "pungi.conf"), "r") as f:

                  data = f.read()

                  self.assertTrue("fake pungi conf 1" in data)

+ 

          execute_cmd.side_effect = mocked_execute_cmd

  

          fake_raw_config_urls = {

-             'pungi.conf': {

+             "pungi.conf": {

                  "url": "http://localhost/test.git",

                  "config_filename": "pungi.conf",

              }

          }

-         with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

-             pungi = Pungi(1, RawPungiConfig('pungi.conf#hash'))

+         with patch.object(conf, "raw_config_urls", new=fake_raw_config_urls):

+             pungi = Pungi(1, RawPungiConfig("pungi.conf#hash"))

              pungi.run(compose)

              pungi.run(compose)

  

          compose_date = time.strftime("%Y%m%d", time.localtime())

-         self.assertEqual(compose.pungi_compose_id,

-                          "compose-1-10-%s.t.1" % compose_date)

+         self.assertEqual(compose.pungi_compose_id, "compose-1-10-%s.t.1" % compose_date)
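
patch.object(conf, "raw_config_urls", new=...) as used in these raw-config tests swaps the attribute for the duration of the with-block and restores the original value on exit, keeping the shared conf object clean between tests. A sketch against a stand-in config object:

    from unittest import mock

    class FakeConf:
        raw_config_urls = {}

    conf = FakeConf()
    with mock.patch.object(conf, "raw_config_urls", new={"pungi.conf": {}}):
        assert "pungi.conf" in conf.raw_config_urls
    assert conf.raw_config_urls == {}   # original value is back
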

  

      @patch("odcs.server.utils.execute_cmd")

      def test_pungi_run_raw_config_subpath(self, execute_cmd):
@@ -565,100 +634,133 @@ 

              with open(os.path.join(topdir, "pungi.conf"), "r") as f:

                  data = f.read()

                  self.assertTrue("fake pungi conf 2" in data)

+ 

          execute_cmd.side_effect = mocked_execute_cmd

  

          fake_raw_config_urls = {

-             'pungi.conf': {

+             "pungi.conf": {

                  "url": "http://localhost/test.git",

                  "config_filename": "pungi.conf",

                  "path": "another",

              }

          }

-         with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

-             pungi = Pungi(1, RawPungiConfig('pungi.conf#hash'))

+         with patch.object(conf, "raw_config_urls", new=fake_raw_config_urls):

+             pungi = Pungi(1, RawPungiConfig("pungi.conf#hash"))

              pungi.run(self.compose)

  

          execute_cmd.assert_called_once()

          self.clone_repo.assert_called_once_with(

-             'http://localhost/test.git', AnyStringWith("/raw_config_repo"),

-             commit='hash')

+             "http://localhost/test.git",

+             AnyStringWith("/raw_config_repo"),

+             commit="hash",

+         )

  

      @patch("odcs.server.utils.execute_cmd")

      def test_raw_config_validate(self, execute_cmd):

          fake_raw_config_urls = {

-             'pungi.conf': {

+             "pungi.conf": {

                  "url": "http://localhost/test.git",

                  "config_filename": "pungi.conf",

-                 "schema_override": "/etc/odcs/extra_override.json"

+                 "schema_override": "/etc/odcs/extra_override.json",

              }

          }

-         with patch.object(conf, 'raw_config_schema_override', new="/etc/odcs/default_override.json"):

-             with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

-                 with patch.object(conf, 'pungi_config_validate', new="pungi-config-validate"):

-                     pungi = Pungi(1, RawPungiConfig('pungi.conf#hash'))

+         with patch.object(

+             conf, "raw_config_schema_override", new="/etc/odcs/default_override.json"

+         ):

+             with patch.object(conf, "raw_config_urls", new=fake_raw_config_urls):

+                 with patch.object(

+                     conf, "pungi_config_validate", new="pungi-config-validate"

+                 ):

+                     pungi = Pungi(1, RawPungiConfig("pungi.conf#hash"))

                      pungi.run(self.compose)

  

-         self.assertEqual(execute_cmd.mock_calls[0], call(

-             ['pungi-config-validate', '--old-composes',

-              '--schema-override', '/etc/odcs/default_override.json',

-              '--schema-override', '/etc/odcs/extra_override.json',

-              AnyStringWith('pungi.conf')],

-             stderr=AnyStringWith("pungi-config-validate-stderr.log"),

-             stdout=AnyStringWith("pungi-config-validate-stdout.log")))

+         self.assertEqual(

+             execute_cmd.mock_calls[0],

+             call(

+                 [

+                     "pungi-config-validate",

+                     "--old-composes",

+                     "--schema-override",

+                     "/etc/odcs/default_override.json",

+                     "--schema-override",

+                     "/etc/odcs/extra_override.json",

+                     AnyStringWith("pungi.conf"),

+                 ],

+                 stderr=AnyStringWith("pungi-config-validate-stderr.log"),

+                 stdout=AnyStringWith("pungi-config-validate-stdout.log"),

+             ),

+         )

  

      @patch("odcs.server.utils.execute_cmd")

      def test_pungi_run_raw_config_custom_timeout(self, execute_cmd):

          fake_raw_config_urls = {

-             'pungi.conf': {

+             "pungi.conf": {

                  "url": "http://localhost/test.git",

                  "config_filename": "pungi.conf",

                  "pungi_timeout": 7200,

              }

          }

-         with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

-             pungi = Pungi(1, RawPungiConfig('pungi.conf#hash'))

+         with patch.object(conf, "raw_config_urls", new=fake_raw_config_urls):

+             pungi = Pungi(1, RawPungiConfig("pungi.conf#hash"))

              pungi.run(self.compose)

  

          execute_cmd.assert_called_once_with(

-             ['pungi-koji', AnyStringWith('pungi.conf'),

-              "--no-latest-link",

-              AnyStringWith('--compose-dir='), '--test'],

-             cwd=AnyStringWith('/tmp/'), timeout=7200,

+             [

+                 "pungi-koji",

+                 AnyStringWith("pungi.conf"),

+                 "--no-latest-link",

+                 AnyStringWith("--compose-dir="),

+                 "--test",

+             ],

+             cwd=AnyStringWith("/tmp/"),

+             timeout=7200,

              stderr=AnyStringWith("pungi-stderr.log"),

-             stdout=AnyStringWith("pungi-stdout.log"))

+             stdout=AnyStringWith("pungi-stdout.log"),

+         )

  

      @patch("odcs.server.utils.execute_cmd")

      def test_pungi_run_raw_config_label(self, execute_cmd):

          self.compose.label = "Alpha-0.1"

  

          fake_raw_config_urls = {

-             'pungi.conf': {

+             "pungi.conf": {

                  "url": "http://localhost/test.git",

                  "config_filename": "pungi.conf",

                  "pungi_timeout": 7200,

              }

          }

-         with patch.object(conf, 'raw_config_urls', new=fake_raw_config_urls):

-             pungi = Pungi(1, RawPungiConfig('pungi.conf#hash'))

+         with patch.object(conf, "raw_config_urls", new=fake_raw_config_urls):

+             pungi = Pungi(1, RawPungiConfig("pungi.conf#hash"))

              pungi.run(self.compose)

  

          execute_cmd.assert_called_once_with(

-             ['pungi-koji', AnyStringWith('pungi.conf'),

-              "--no-latest-link",

-              AnyStringWith('--compose-dir='), '--test',

-              '--label=Alpha-0.1'],

-             cwd=AnyStringWith('/tmp/'), timeout=7200,

+             [

+                 "pungi-koji",

+                 AnyStringWith("pungi.conf"),

+                 "--no-latest-link",

+                 AnyStringWith("--compose-dir="),

+                 "--test",

+                 "--label=Alpha-0.1",

+             ],

+             cwd=AnyStringWith("/tmp/"),

+             timeout=7200,

              stderr=AnyStringWith("pungi-stderr.log"),

-             stdout=AnyStringWith("pungi-stdout.log"))

+             stdout=AnyStringWith("pungi-stdout.log"),

+         )

  

  

  class TestPungiLogs(ModelsBaseTest):

- 

      def setUp(self):

          super(TestPungiLogs, self).setUp()

          self.compose = Compose.create(

-             db.session, "me", PungiSourceType.KOJI_TAG, "tag",

-             COMPOSE_RESULTS["repository"], 3600, packages="ed")

+             db.session,

+             "me",

+             PungiSourceType.KOJI_TAG,

+             "tag",

+             COMPOSE_RESULTS["repository"],

+             3600,

+             packages="ed",

+         )

          self.compose.state = COMPOSE_STATES["failed"]

          db.session.add(self.compose)

          db.session.commit()
@@ -676,9 +778,10 @@ 

  For more details see {0}/odcs-717-1-20180323.n.0/work/x86_64/pungi/Temporary.x86_64.log

  2018-03-23 03:38:42 [ERROR   ] Extended traceback in: {0}/odcs-717-1-20180323.n.0/logs/global/traceback.global.log

  2018-03-23 03:38:42 [CRITICAL] Compose failed: {0}/odcs-717-1-20180323.n.0

-         """.format(conf.target_dir)

-         patched_open.return_value = mock_open(

-             read_data=pungi_log).return_value

+         """.format(

+             conf.target_dir

+         )

+         patched_open.return_value = mock_open(read_data=pungi_log).return_value

  

          pungi_logs = PungiLogs(self.compose)

          errors = pungi_logs.get_error_string()
@@ -686,16 +789,19 @@ 

              errors,

              "Compose run failed: No such entry in table tag: tag\n"

              "Compose run failed: ERROR running command: pungi -G\n"

-             "For more details see http://localhost/odcs/odcs-717-1-20180323.n.0/work/x86_64/pungi/Temporary.x86_64.log\n")

+             "For more details see http://localhost/odcs/odcs-717-1-20180323.n.0/work/x86_64/pungi/Temporary.x86_64.log\n",

+         )

  

      @patch("odcs.server.pungi.open", create=True)

      def test_error_string_too_many_errors(self, patched_open):

-         pungi_log = """

+         pungi_log = (

+             """

  2018-03-23 03:38:42 [INFO    ] Writing pungi config

  2018-03-22 17:10:49 [ERROR   ] Compose run failed: No such entry in table tag: tag

-         """ * 100

-         patched_open.return_value = mock_open(

-             read_data=pungi_log).return_value

+         """

+             * 100

+         )

+         patched_open.return_value = mock_open(read_data=pungi_log).return_value

  

          pungi_logs = PungiLogs(self.compose)

          errors = pungi_logs.get_error_string()
@@ -708,8 +814,7 @@ 

  2018-03-23 03:38:42 [INFO    ] Writing pungi config

  2018-03-23 03:38:42 [INFO    ] [BEGIN] Running pungi

          """

-         patched_open.return_value = mock_open(

-             read_data=pungi_log).return_value

+         patched_open.return_value = mock_open(read_data=pungi_log).return_value

  

          pungi_logs = PungiLogs(self.compose)

          errors = pungi_logs.get_error_string()
@@ -729,10 +834,13 @@ 

      @patch("odcs.server.pungi.open", create=True)

      def test_config_dump(self, patched_open):

          patched_open.return_value = mock_open(

-             read_data="fake\npungi\nconf\n").return_value

+             read_data="fake\npungi\nconf\n"

+         ).return_value

  

          pungi_logs = PungiLogs(self.compose)

          ret = pungi_logs.get_config_dump()

          self.assertEqual(ret, "fake\npungi\nconf\n")

  

-         patched_open.assert_called_once_with(AnyStringWith("logs/global/config-dump.global.log"), "r")

+         patched_open.assert_called_once_with(

+             AnyStringWith("logs/global/config-dump.global.log"), "r"

+         )
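
The TestPungiLogs cases above all drive file reads through unittest.mock's mock_open helper: mock_open(read_data=...) builds a mock file object whose read() returns the canned text, and wiring it into the patched open makes every open() call in the code under test yield it. A simplified, Python 3 sketch (the tests themselves patch odcs.server.pungi.open rather than the builtin):

    from unittest.mock import mock_open, patch

    m = mock_open(read_data="fake\npungi\nconf\n")
    with patch("builtins.open", m):
        with open("/any/path", "r") as f:
            assert f.read() == "fake\npungi\nconf\n"
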

@@ -28,16 +28,13 @@ 

  

  

  RPMS_JSON = {

-     "header": {

-         "type": "productmd.rpms",

-         "version": "1.2"

-     },

+     "header": {"type": "productmd.rpms", "version": "1.2"},

      "payload": {

          "compose": {

              "date": "20181210",

              "id": "odcs-691-1-20181210.n.0",

              "respin": 0,

-             "type": "nightly"

+             "type": "nightly",

          },

          "rpms": {

              "Temporary": {
@@ -46,60 +43,67 @@ 

                          "flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.src": {

                              "category": "source",

                              "path": "Temporary/source/tree/Packages/f/flatpak-rpm-macros-29-6.module+125+c4f5c7f2.src.rpm",

-                             "sigkey": None

+                             "sigkey": None,

                          },

                          "flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.x86_64": {

                              "category": "binary",

                              "path": "Temporary/x86_64/os/Packages/f/flatpak-rpm-macros-29-6.module+125+c4f5c7f2.x86_64.rpm",

-                             "sigkey": None

-                         }

+                             "sigkey": None,

+                         },

                      },

                      "flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.src": {

                          "flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.src": {

                              "category": "source",

                              "path": "Temporary/source/tree/Packages/f/flatpak-runtime-config-29-4.module+125+c4f5c7f2.src.rpm",

-                             "sigkey": "sigkey1"

+                             "sigkey": "sigkey1",

                          },

                          "flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.x86_64": {

                              "category": "binary",

                              "path": "Temporary/x86_64/os/Packages/f/flatpak-runtime-config-29-4.module+125+c4f5c7f2.x86_64.rpm",

-                             "sigkey": "sigkey1"

-                         }

-                     }

+                             "sigkey": "sigkey1",

+                         },

+                     },

                  }

              }

-         }

-     }

+         },

+     },

  }

  

  

  @patch("odcs.server.pungi_compose.PungiCompose._fetch_json")

  class TestPungiCompose(unittest.TestCase):

- 

      def test_get_rpms_data(self, fetch_json):

          fetch_json.return_value = RPMS_JSON

          compose = PungiCompose("http://localhost/compose/Temporary")

          data = compose.get_rpms_data()

  

          expected = {

-             'sigkeys': set(['sigkey1', None]),

-             'arches': set(['x86_64']),

-             'builds': {

-                 'flatpak-rpm-macros-29-6.module+125+c4f5c7f2': set([

-                     'flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.src',

-                     'flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.x86_64']),

-                 'flatpak-runtime-config-29-4.module+125+c4f5c7f2': set([

-                     'flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.src',

-                     'flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.x86_64'])

-             }

+             "sigkeys": set(["sigkey1", None]),

+             "arches": set(["x86_64"]),

+             "builds": {

+                 "flatpak-rpm-macros-29-6.module+125+c4f5c7f2": set(

+                     [

+                         "flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.src",

+                         "flatpak-rpm-macros-0:29-6.module+125+c4f5c7f2.x86_64",

+                     ]

+                 ),

+                 "flatpak-runtime-config-29-4.module+125+c4f5c7f2": set(

+                     [

+                         "flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.src",

+                         "flatpak-runtime-config-0:29-4.module+125+c4f5c7f2.x86_64",

+                     ]

+                 ),

+             },

          }

  

          self.assertEqual(data, expected)

  

      def test_get_rpms_data_unknown_variant(self, fetch_json):

          fetch_json.return_value = RPMS_JSON

-         msg = ("The http://localhost/compose/metadata/rpms.json does not "

-                "contain payload -> rpms -> Workstation section")

+         msg = (

+             "The http://localhost/compose/metadata/rpms.json does not "

+             "contain payload -> rpms -> Workstation section"

+         )

          with six.assertRaisesRegex(self, ValueError, msg):

              compose = PungiCompose("http://localhost/compose/Workstation")

              compose.get_rpms_data()
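
six.assertRaisesRegex, called here in function form with the test case as its first argument, papers over the Python 2/3 rename: it dispatches to TestCase.assertRaisesRegexp on Python 2 and TestCase.assertRaisesRegex on Python 3. A minimal usage sketch:

    import unittest
    import six

    class Demo(unittest.TestCase):
        def test_raises(self):
            with six.assertRaisesRegex(self, ValueError, "does not contain"):
                raise ValueError("rpms.json does not contain the section")
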

@@ -42,8 +42,13 @@ 

          super(TestRemoveExpiredComposesThread, self).setUp()

  

          compose = Compose.create(

-             db.session, "unknown", PungiSourceType.MODULE, "testmodule-master",

-             COMPOSE_RESULTS["repository"], 60)

+             db.session,

+             "unknown",

+             PungiSourceType.MODULE,

+             "testmodule-master",

+             COMPOSE_RESULTS["repository"],

+             60,

+         )

          db.session.add(compose)

          db.session.commit()

  
@@ -87,11 +92,16 @@ 

          db.session.expunge_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["removed"])

-         self.assertEqual(c.state_reason, 'Compose is expired.')

-         unlink.assert_has_calls([

-             mock.call(AnyStringWith("test_composes/nightly/compose-1-10-2020110.n.0")),

-             mock.call(AnyStringWith("test_composes/nightly/latest-compose-1")),

-             mock.call(AnyStringWith("test_composes/odcs-1"))])

+         self.assertEqual(c.state_reason, "Compose is expired.")

+         unlink.assert_has_calls(

+             [

+                 mock.call(

+                     AnyStringWith("test_composes/nightly/compose-1-10-2020110.n.0")

+                 ),

+                 mock.call(AnyStringWith("test_composes/nightly/latest-compose-1")),

+                 mock.call(AnyStringWith("test_composes/odcs-1")),

+             ]

+         )

  

      def test_a_compose_which_state_is_done_is_removed_keep_state_reason(self):

          """
@@ -107,7 +117,7 @@ 

          db.session.expunge_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["removed"])

-         self.assertEqual(c.state_reason, 'Generated successfully.\nCompose is expired.')

+         self.assertEqual(c.state_reason, "Generated successfully.\nCompose is expired.")

  

      def test_does_not_remove_a_compose_which_is_not_expired(self):

          """
@@ -142,31 +152,41 @@ 

          self.thread.do_work()

          self.assertEqual(

              remove_compose_dir.call_args_list,

-             [mock.call(os.path.join(conf.target_dir, "latest-odcs-96-1")),

-              mock.call(os.path.join(conf.target_dir, "odcs-96-1-20171005.n.0")),

-              mock.call(os.path.join(conf.target_dir, "odcs-96"))])

+             [

+                 mock.call(os.path.join(conf.target_dir, "latest-odcs-96-1")),

+                 mock.call(os.path.join(conf.target_dir, "odcs-96-1-20171005.n.0")),

+                 mock.call(os.path.join(conf.target_dir, "odcs-96")),

+             ],

+         )

  

      @patch("os.path.isdir")

      @patch("glob.glob")

      @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir")

-     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

-                   new={"releng-private": "/tmp/private"})

-     def test_remove_left_composes_extra_target_dir(self, remove_compose_dir, glob, isdir):

+     @patch.object(

+         odcs.server.config.Config,

+         "extra_target_dirs",

+         new={"releng-private": "/tmp/private"},

+     )

+     def test_remove_left_composes_extra_target_dir(

+         self, remove_compose_dir, glob, isdir

+     ):

          isdir.return_value = True

          self.thread.do_work()

          print(glob.call_args_list)

          self.assertEqual(

              glob.call_args_list,

-             [mock.call(os.path.join(conf.target_dir, "latest-odcs-*")),

-              mock.call("/tmp/private/latest-odcs-*"),

-              mock.call(os.path.join(conf.target_dir, "odcs-*")),

-              mock.call("/tmp/private/odcs-*")])

+             [

+                 mock.call(os.path.join(conf.target_dir, "latest-odcs-*")),

+                 mock.call("/tmp/private/latest-odcs-*"),

+                 mock.call(os.path.join(conf.target_dir, "odcs-*")),

+                 mock.call("/tmp/private/odcs-*"),

+             ],

+         )

  

      @patch("os.path.isdir")

      @patch("glob.glob")

      @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir")

-     def test_remove_left_composes_not_dir(

-             self, remove_compose_dir, glob, isdir):

+     def test_remove_left_composes_not_dir(self, remove_compose_dir, glob, isdir):

          isdir.return_value = False

          self._mock_glob(glob, ["latest-odcs-96-1"])

          self.thread.do_work()
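
Note the argument order in these stacked-@patch tests: decorators apply bottom-up, so the mock for the decorator nearest the def arrives first (here _remove_compose_dir, then glob, then isdir). A compact sketch of the rule:

    from unittest.mock import patch

    @patch("os.path.isdir")   # outermost decorator -> last mock argument
    @patch("glob.glob")
    @patch("os.unlink")       # innermost decorator -> first mock argument
    def check(unlink, glob_mock, isdir):
        return unlink, glob_mock, isdir

    check()   # each argument arrives as a fresh MagicMock
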
@@ -175,8 +195,7 @@ 

      @patch("os.path.isdir")

      @patch("glob.glob")

      @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir")

-     def test_remove_left_composes_wrong_dir(

-             self, remove_compose_dir, glob, isdir):

+     def test_remove_left_composes_wrong_dir(self, remove_compose_dir, glob, isdir):

          isdir.return_value = True

          self._mock_glob(glob, ["latest-odcs-", "odcs-", "odcs-abc"])

          self.thread.do_work()
@@ -185,8 +204,7 @@ 

      @patch("os.path.isdir")

      @patch("glob.glob")

      @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir")

-     def test_remove_left_composes_valid_compose(

-             self, remove_compose_dir, glob, isdir):

+     def test_remove_left_composes_valid_compose(self, remove_compose_dir, glob, isdir):

          isdir.return_value = True

          self._mock_glob(glob, ["latest-odcs-1-1", "odcs-1-1-2017.n.0"])

          c = db.session.query(Compose).filter(Compose.id == 1).one()
@@ -200,7 +218,8 @@ 

      @patch("glob.glob")

      @patch("odcs.server.backend.RemoveExpiredComposesThread._remove_compose_dir")

      def test_remove_left_composes_expired_compose(

-             self, remove_compose_dir, glob, isdir):

+         self, remove_compose_dir, glob, isdir

+     ):

          isdir.return_value = True

          self._mock_glob(glob, ["latest-odcs-1-1", "odcs-1-1-2017.n.0"])

          c = db.session.query(Compose).filter(Compose.id == 1).one()
@@ -210,15 +229,17 @@ 

          self.thread.do_work()

          self.assertEqual(

              remove_compose_dir.call_args_list,

-             [mock.call(os.path.join(conf.target_dir, "latest-odcs-1-1")),

-              mock.call(os.path.join(conf.target_dir, "odcs-1-1-2017.n.0"))])

+             [

+                 mock.call(os.path.join(conf.target_dir, "latest-odcs-1-1")),

+                 mock.call(os.path.join(conf.target_dir, "odcs-1-1-2017.n.0")),

+             ],

+         )

  

      @patch("shutil.rmtree")

      @patch("os.unlink")

      @patch("os.path.realpath")

      @patch("os.path.exists")

-     def test_remove_compose_dir_symlink(

-             self, exists, realpath, unlink, rmtree):

+     def test_remove_compose_dir_symlink(self, exists, realpath, unlink, rmtree):

          exists.return_value = True

          toplevel_dir = "/odcs"

          realpath.return_value = "/odcs-real"
@@ -231,10 +252,10 @@ 

      @patch("os.unlink")

      @patch("os.path.realpath")

      @patch("os.path.exists")

-     def test_remove_compose_dir_broken_symlink(

-             self, exists, realpath, unlink, rmtree):

+     def test_remove_compose_dir_broken_symlink(self, exists, realpath, unlink, rmtree):

          def mocked_exists(p):

              return p != "/odcs-real"

+ 

          exists.side_effect = mocked_exists

          toplevel_dir = "/odcs"

          realpath.return_value = "/odcs-real"
@@ -247,8 +268,7 @@ 

      @patch("os.unlink")

      @patch("os.path.realpath")

      @patch("os.path.exists")

-     def test_remove_compose_dir_real_dir(

-             self, exists, realpath, unlink, rmtree):

+     def test_remove_compose_dir_real_dir(self, exists, realpath, unlink, rmtree):

          exists.return_value = True

          toplevel_dir = "/odcs"

          realpath.return_value = "/odcs"
@@ -261,8 +281,7 @@ 

      @patch("os.path.realpath")

      @patch("os.path.exists")

      @patch("odcs.server.backend.log.warning")

-     def test_remove_compose_rmtree_error(

-             self, log_warning, exists, realpath, unlink):

+     def test_remove_compose_rmtree_error(self, log_warning, exists, realpath, unlink):

          exists.return_value = True

          toplevel_dir = "/odcs"

          realpath.return_value = "/odcs-real"
@@ -270,4 +289,5 @@ 

          # This must not raise an exception.

          self.thread._remove_compose_dir(toplevel_dir)

          log_warning.assert_called_once_with(

-             AnyStringWith('Cannot remove some files in /odcs-real:'))

+             AnyStringWith("Cannot remove some files in /odcs-real:")

+         )
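
AnyStringWith, used throughout the assertions above, is a helper from this repository's test utilities; its implementation is not shown in this diff, but the usual shape of such a matcher (assumed here, the real one may differ) is a str subclass that compares equal to any string containing it:

    class AnyStringWith(str):
        def __eq__(self, other):
            # Hypothetical sketch: match any string that contains self.
            return isinstance(other, str) and self in other

    assert AnyStringWith("odcs-1") == "/tmp/test_composes/odcs-1/work"
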

file modified
+1 -3
@@ -30,7 +30,6 @@ 

  

  

  class TestUtilsExecuteCmd(unittest.TestCase):

- 

      def setUp(self):

          super(TestUtilsExecuteCmd, self).setUp()

  
@@ -39,8 +38,7 @@ 

  

      def test_execute_cmd_timeout_called(self):

          start_time = time.time()

-         with six.assertRaisesRegex(

-                 self, RuntimeError, 'Compose has taken more time.*'):

+         with six.assertRaisesRegex(self, RuntimeError, "Compose has taken more time.*"):

              execute_cmd(["/usr/bin/sleep", "5"], timeout=1)

          stop_time = time.time()

  

file modified
+1055 -647
@@ -36,8 +36,12 @@ 

  

  from odcs.server import conf, db, app, login_manager, version

  from odcs.server.models import Compose, User

- from odcs.common.types import (COMPOSE_STATES, COMPOSE_RESULTS, COMPOSE_FLAGS,

-                                MULTILIB_METHODS)

+ from odcs.common.types import (

+     COMPOSE_STATES,

+     COMPOSE_RESULTS,

+     COMPOSE_FLAGS,

+     MULTILIB_METHODS,

+ )

  from odcs.server.pungi import PungiSourceType

  from .utils import ModelsBaseTest

  from odcs.server.api_utils import validate_json_data
@@ -51,7 +55,6 @@ 

  

  

  class TestValidateJSONData(unittest.TestCase):

- 

      def test_validate_json_data_allowed_dict(self):

          data = {"source": {"source": ""}}

          validate_json_data(data)
@@ -96,45 +99,35 @@ 

  

  

  class ViewBaseTest(ModelsBaseTest):

- 

      def setUp(self):

          super(ViewBaseTest, self).setUp()

  

          patched_allowed_clients = {

-             'groups': {

-                 'composer': {},

-                 'dev2': {

-                     'source_types': ['module']

-                 },

-                 'dev3': {

-                     'source_types': ['raw_config']

-                 }

+             "groups": {

+                 "composer": {},

+                 "dev2": {"source_types": ["module"]},

+                 "dev3": {"source_types": ["raw_config"]},

              },

-             'users': {

-                 'dev': {

-                     'arches': ['ppc64', 's390', 'x86_64']

+             "users": {

+                 "dev": {"arches": ["ppc64", "s390", "x86_64"]},

+                 "dev2": {

+                     "source_types": ["module", "raw_config"],

+                     "compose_types": ["test", "nightly"],

                  },

-                 'dev2': {

-                     'source_types': ['module', 'raw_config'],

-                     'compose_types': ["test", "nightly"]

+                 "dev3": {"source_types": ["tag"], "target_dirs": ["releng-private"]},

+                 "dev4": {

+                     "source_types": ["raw_config"],

+                     "raw_config_keys": ["pungi_cfg2"],

                  },

-                 'dev3': {

-                     'source_types': ['tag'],

-                     'target_dirs': ["releng-private"]

-                 },

-                 'dev4': {

-                     'source_types': ['raw_config'],

-                     'raw_config_keys': ["pungi_cfg2"]

-                 },

-             }

+             },

          }

-         patched_admins = {'groups': ['admin'], 'users': ['root']}

-         self.patch_allowed_clients = patch.object(odcs.server.auth.conf,

-                                                   'allowed_clients',

-                                                   new=patched_allowed_clients)

-         self.patch_admins = patch.object(odcs.server.auth.conf,

-                                          'admins',

-                                          new=patched_admins)

+         patched_admins = {"groups": ["admin"], "users": ["root"]}

+         self.patch_allowed_clients = patch.object(

+             odcs.server.auth.conf, "allowed_clients", new=patched_allowed_clients

+         )

+         self.patch_admins = patch.object(

+             odcs.server.auth.conf, "admins", new=patched_admins

+         )

          self.patch_allowed_clients.start()

          self.patch_admins.start()

  
@@ -154,9 +147,9 @@ 

              patch_auth_backend = None

              if user is not None:

                  # authentication is disabled with auth_backend=noauth

-                 patch_auth_backend = patch.object(odcs.server.auth.conf,

-                                                   'auth_backend',

-                                                   new='kerberos')

+                 patch_auth_backend = patch.object(

+                     odcs.server.auth.conf, "auth_backend", new="kerberos"

+                 )

                  patch_auth_backend.start()

                  if not User.find_user_by_name(user):

                      User.create_user(username=user)
@@ -171,8 +164,8 @@ 

                  else:

                      flask.g.groups = []

                  with self.client.session_transaction() as sess:

-                     sess['user_id'] = user

-                     sess['_fresh'] = True

+                     sess["user_id"] = user

+                     sess["_fresh"] = True

              try:

                  yield

              finally:
@@ -189,7 +182,8 @@ 

      def setUp(self):

          super(TestOpenIDCLogin, self).setUp()

          self.patch_auth_backend = patch.object(

-             odcs.server.auth.conf, 'auth_backend', new='openidc')

+             odcs.server.auth.conf, "auth_backend", new="openidc"

+         )

          self.patch_auth_backend.start()

  

      def tearDown(self):
@@ -197,46 +191,46 @@ 

          self.patch_auth_backend.stop()

  

      def test_openidc_post_unauthorized(self):

-         rv = self.client.post('/api/1/composes/', data="")

-         self.assertEqual(rv.status, '401 UNAUTHORIZED')

+         rv = self.client.post("/api/1/composes/", data="")

+         self.assertEqual(rv.status, "401 UNAUTHORIZED")

  

      def test_openidc_patch_unauthorized(self):

-         rv = self.client.patch('/api/1/composes/1')

-         self.assertEqual(rv.status, '401 UNAUTHORIZED')

+         rv = self.client.patch("/api/1/composes/1")

+         self.assertEqual(rv.status, "401 UNAUTHORIZED")

  

      def test_openidc_delete_unauthorized(self):

-         rv = self.client.delete('/api/1/composes/1')

-         self.assertEqual(rv.status, '401 UNAUTHORIZED')

+         rv = self.client.delete("/api/1/composes/1")

+         self.assertEqual(rv.status, "401 UNAUTHORIZED")

  

  

  class TestHandlingErrors(ViewBaseTest):

      """Test registered error handlers"""

  

-     @patch('odcs.server.views.ODCSAPI.delete')

+     @patch("odcs.server.views.ODCSAPI.delete")

      def test_bad_request_error(self, delete):

-         delete.side_effect = BadRequest('bad request to delete')

+         delete.side_effect = BadRequest("bad request to delete")

  

-         resp = self.client.delete('/api/1/composes/100')

+         resp = self.client.delete("/api/1/composes/100")

          data = json.loads(resp.get_data(as_text=True))

  

-         self.assertEqual('Bad Request', data['error'])

-         self.assertEqual(400, data['status'])

-         self.assertIn('bad request to delete', data['message'])

+         self.assertEqual("Bad Request", data["error"])

+         self.assertEqual(400, data["status"])

+         self.assertIn("bad request to delete", data["message"])

  

      def test_return_internal_server_error_if_error_is_not_caught(self):

          possible_errors = [

-             RuntimeError('runtime error'),

-             IndexError('out of scope'),

-             OSError('os error'),

+             RuntimeError("runtime error"),

+             IndexError("out of scope"),

+             OSError("os error"),

          ]

          for e in possible_errors:

-             with patch('odcs.server.views.filter_composes', side_effect=e):

-                 resp = self.client.get('/api/1/composes/')

+             with patch("odcs.server.views.filter_composes", side_effect=e):

+                 resp = self.client.get("/api/1/composes/")

                  data = json.loads(resp.get_data(as_text=True))

  

-                 self.assertEqual('Internal Server Error', data['error'])

-                 self.assertEqual(500, data['status'])

-                 self.assertEqual(str(e), data['message'])

+                 self.assertEqual("Internal Server Error", data["error"])

+                 self.assertEqual(500, data["status"])

+                 self.assertEqual(str(e), data["message"])

  

  

  class TestViews(ViewBaseTest):
@@ -244,8 +238,9 @@ 

  

      def setUp(self):

          super(TestViews, self).setUp()

-         self.oidc_base_namespace = patch.object(conf, 'oidc_base_namespace',

-                                                 new='http://example.com/')

+         self.oidc_base_namespace = patch.object(

+             conf, "oidc_base_namespace", new="http://example.com/"

+         )

          self.oidc_base_namespace.start()

  

      def tearDown(self):
@@ -253,94 +248,121 @@ 

          super(TestViews, self).tearDown()

  

      def setup_test_data(self):

-         self.initial_datetime = datetime(year=2016, month=1, day=1,

-                                          hour=0, minute=0, second=0)

+         self.initial_datetime = datetime(

+             year=2016, month=1, day=1, hour=0, minute=0, second=0

+         )

          with freeze_time(self.initial_datetime):

              self.c1 = Compose.create(

-                 db.session, "unknown", PungiSourceType.MODULE, "testmodule:master",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "unknown",

+                 PungiSourceType.MODULE,

+                 "testmodule:master",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

              self.c2 = Compose.create(

-                 db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "me",

+                 PungiSourceType.KOJI_TAG,

+                 "f26",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

              db.session.add(self.c1)

              db.session.add(self.c2)

              db.session.commit()

  

      def test_metrics(self):

-         rv = self.client.get('/api/1/metrics/')

+         rv = self.client.get("/api/1/metrics/")

          data = rv.get_data(as_text=True)

          self.assertTrue("HELP composes_total Total number of composes" in data)

  

      def test_index(self):

-         rv = self.client.get('/')

+         rv = self.client.get("/")

          self.assertEqual(rv.status_code, 200)

-         self.assertEqual(rv.content_type, 'text/html; charset=utf-8')

+         self.assertEqual(rv.content_type, "text/html; charset=utf-8")

          data = rv.get_data(as_text=True)

-         self.assertIn('On Demand Compose Service', data)

+         self.assertIn("On Demand Compose Service", data)

  

      def test_about(self):

-         rv = self.client.get('/api/1/about/')

+         rv = self.client.get("/api/1/about/")

          data = json.loads(rv.get_data(as_text=True))

          self.assertEqual(

              data,

-             {'version': version, 'auth_backend': 'noauth', 'raw_config_urls': {},

-              'allowed_clients': odcs.server.auth.conf.allowed_clients,

-              'sigkeys': []})

+             {

+                 "version": version,

+                 "auth_backend": "noauth",

+                 "raw_config_urls": {},

+                 "allowed_clients": odcs.server.auth.conf.allowed_clients,

+                 "sigkeys": [],

+             },

+         )

  

      def test_submit_invalid_json(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data="{")

+             rv = self.client.post("/api/1/composes/", data="{")

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(rv.status, '400 BAD REQUEST')

+         self.assertEqual(rv.status, "400 BAD REQUEST")

          self.assertEqual(data["error"], "Bad Request")

          self.assertEqual(data["status"], 400)

          self.assertTrue(data["message"].find("Failed to decode JSON object") != -1)

  

      def test_submit_build(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:master"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         expected_json = {'source_type': 2, 'state': 0, 'time_done': None,

-                          'state_name': 'wait',

-                          'state_reason': None,

-                          'source': u'testmodule:master',

-                          'owner': u'dev',

-                          'result_repo': 'http://localhost/odcs/odcs-%d/compose/Temporary' % data['id'],

-                          'result_repofile': 'http://localhost/odcs/odcs-%d/compose/Temporary/odcs-%d.repo' % (data['id'], data['id']),

-                          'time_submitted': data["time_submitted"], 'id': data['id'],

-                          'time_started': None,

-                          'time_removed': None,

-                          'removed_by': None,

-                          'time_to_expire': data["time_to_expire"],

-                          'flags': [],

-                          'results': ['repository'],

-                          'sigkeys': '',

-                          'koji_event': None,

-                          'koji_task_id': None,

-                          'packages': None,

-                          'builds': None,

-                          'arches': 'x86_64',

-                          'multilib_arches': '',

-                          'multilib_method': 0,

-                          'lookaside_repos': '',

-                          'modular_koji_tags': None,

-                          'module_defaults_url': None,

-                          'label': None,

-                          'compose_type': 'test',

-                          'pungi_compose_id': None,

-                          'target_dir': 'default',

-                          'toplevel_url': 'http://localhost/odcs/odcs-%d' % data['id']}

+         expected_json = {

+             "source_type": 2,

+             "state": 0,

+             "time_done": None,

+             "state_name": "wait",

+             "state_reason": None,

+             "source": u"testmodule:master",

+             "owner": u"dev",

+             "result_repo": "http://localhost/odcs/odcs-%d/compose/Temporary"

+             % data["id"],

+             "result_repofile": "http://localhost/odcs/odcs-%d/compose/Temporary/odcs-%d.repo"

+             % (data["id"], data["id"]),

+             "time_submitted": data["time_submitted"],

+             "id": data["id"],

+             "time_started": None,

+             "time_removed": None,

+             "removed_by": None,

+             "time_to_expire": data["time_to_expire"],

+             "flags": [],

+             "results": ["repository"],

+             "sigkeys": "",

+             "koji_event": None,

+             "koji_task_id": None,

+             "packages": None,

+             "builds": None,

+             "arches": "x86_64",

+             "multilib_arches": "",

+             "multilib_method": 0,

+             "lookaside_repos": "",

+             "modular_koji_tags": None,

+             "module_defaults_url": None,

+             "label": None,

+             "compose_type": "test",

+             "pungi_compose_id": None,

+             "target_dir": "default",

+             "toplevel_url": "http://localhost/odcs/odcs-%d" % data["id"],

+         }

          self.assertEqual(data, expected_json)

  

          db.session.expire_all()
@@ -348,14 +370,17 @@ 

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_no_packages(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26'},

-                  'flags': ['no_deps']}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "tag", "source": "f26"}, "flags": ["no_deps"]}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["state_name"], "wait")
@@ -366,17 +391,23 @@ 

          self.assertEqual(c.packages, None)

  

      def test_submit_build_nodeps(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']},

-                  'flags': ['no_deps']}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26", "packages": ["ed"]},

+                         "flags": ["no_deps"],

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['flags'], ['no_deps'])

+         self.assertEqual(data["flags"], ["no_deps"])

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 3).one()
@@ -384,17 +415,23 @@ 

          self.assertEqual(c.flags, COMPOSE_FLAGS["no_deps"])

  

      def test_submit_build_noinheritance(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']},

-                  'flags': ['no_inheritance']}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26", "packages": ["ed"]},

+                         "flags": ["no_inheritance"],

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['flags'], ['no_inheritance'])

+         self.assertEqual(data["flags"], ["no_inheritance"])

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 3).one()
@@ -402,209 +439,298 @@ 

          self.assertEqual(c.flags, COMPOSE_FLAGS["no_inheritance"])

  

      def test_submit_build_boot_iso(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']},

-                  'results': ['boot.iso']}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26", "packages": ["ed"]},

+                         "results": ["boot.iso"],

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(set(data['results']), set(['repository', 'boot.iso']))

+         self.assertEqual(set(data["results"]), set(["repository", "boot.iso"]))

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

          self.assertEqual(

-             c.results,

-             COMPOSE_RESULTS["boot.iso"] | COMPOSE_RESULTS["repository"])

+             c.results, COMPOSE_RESULTS["boot.iso"] | COMPOSE_RESULTS["repository"]

+         )

  

      def test_submit_build_with_koji_event(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed'],

-                             'koji_event': 123456}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "tag",

+                             "source": "f26",

+                             "packages": ["ed"],

+                             "koji_event": 123456,

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['koji_event'], 123456)

+         self.assertEqual(data["koji_event"], 123456)

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.koji_event, 123456)

  

      def test_submit_build_sigkeys(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed'],

-                             'sigkeys': ["123", "456"]}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "tag",

+                             "source": "f26",

+                             "packages": ["ed"],

+                             "sigkeys": ["123", "456"],

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['sigkeys'], '123 456')

+         self.assertEqual(data["sigkeys"], "123 456")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

-     @patch.object(odcs.server.config.Config, 'sigkeys', new_callable=PropertyMock)

+     @patch.object(odcs.server.config.Config, "sigkeys", new_callable=PropertyMock)

      def test_submit_build_default_sigkeys(self, sigkeys):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              sigkeys.return_value = ["x", "y"]

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "tag", "source": "f26", "packages": ["ed"]}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['sigkeys'], 'x y')

+         self.assertEqual(data["sigkeys"], "x y")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_arches(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']},

-                  'arches': ["ppc64", "s390"]}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26", "packages": ["ed"]},

+                         "arches": ["ppc64", "s390"],

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['arches'], 'ppc64 s390')

+         self.assertEqual(data["arches"], "ppc64 s390")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_multilib_arches(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']},

-                  'arches': ["ppc64", "s390"], 'multilib_arches': ["x86_64", "ppc64le"]}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26", "packages": ["ed"]},

+                         "arches": ["ppc64", "s390"],

+                         "multilib_arches": ["x86_64", "ppc64le"],

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['multilib_arches'], 'x86_64 ppc64le')

+         self.assertEqual(data["multilib_arches"], "x86_64 ppc64le")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_multilib_method(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']},

-                  'arches': ["ppc64", "s390"], 'multilib_method': ["runtime", "devel"]}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26", "packages": ["ed"]},

+                         "arches": ["ppc64", "s390"],

+                         "multilib_method": ["runtime", "devel"],

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['multilib_method'],

-                          MULTILIB_METHODS["runtime"] | MULTILIB_METHODS["devel"])

+         self.assertEqual(

+             data["multilib_method"],

+             MULTILIB_METHODS["runtime"] | MULTILIB_METHODS["devel"],

+         )

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_multilib_method_unknown(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed']},

-                  'arches': ["ppc64", "s390"], 'multilib_method': ["foo", "devel"]}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26", "packages": ["ed"]},

+                         "arches": ["ppc64", "s390"],

+                         "multilib_method": ["foo", "devel"],

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(

-             data['message'], 'Unknown multilib method "foo"')

+         self.assertEqual(data["message"], 'Unknown multilib method "foo"')

  

      def test_submit_build_modular_koji_tags(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26',

-                             'modular_koji_tags': ['f26-modules']}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "tag",

+                             "source": "f26",

+                             "modular_koji_tags": ["f26-modules"],

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['modular_koji_tags'], "f26-modules")

+         self.assertEqual(data["modular_koji_tags"], "f26-modules")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_target_dir_unknown(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26'},

-                  'target_dir': 'foo'}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "tag", "source": "f26"}, "target_dir": "foo"}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['status'], 400)

-         self.assertEqual(data['error'], 'Bad Request')

-         self.assertEqual(data['message'], 'Unknown "target_dir" "foo"')

+         self.assertEqual(data["status"], 400)

+         self.assertEqual(data["error"], "Bad Request")

+         self.assertEqual(data["message"], 'Unknown "target_dir" "foo"')

  

-     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

-                   new={"releng-private": "/tmp/private"})

+     @patch.object(

+         odcs.server.config.Config,

+         "extra_target_dirs",

+         new={"releng-private": "/tmp/private"},

+     )

      def test_submit_build_target_not_allowed(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26'},

-                  'target_dir': 'releng-private'}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26"},

+                         "target_dir": "releng-private",

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['status'], 403)

-         self.assertEqual(data['error'], 'Forbidden')

+         self.assertEqual(data["status"], 403)

+         self.assertEqual(data["error"], "Forbidden")

          self.assertEqual(

-             data['message'],

-             'User dev not allowed to operate with compose with target_dirs=releng-private.')

+             data["message"],

+             "User dev not allowed to operate with compose with target_dirs=releng-private.",

+         )

  

-     @patch.object(odcs.server.config.Config, 'extra_target_dirs',

-                   new={"releng-private": "/tmp/private"})

+     @patch.object(

+         odcs.server.config.Config,

+         "extra_target_dirs",

+         new={"releng-private": "/tmp/private"},

+     )

      def test_submit_build_target_dir(self):

-         with self.test_request_context(user='dev3'):

+         with self.test_request_context(user="dev3"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26'},

-                  'target_dir': 'releng-private'}))

-             self.assertEqual(rv.status, '200 OK')

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "tag", "source": "f26"},

+                         "target_dir": "releng-private",

+                     }

+                 ),

+             )

+             self.assertEqual(rv.status, "200 OK")

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['target_dir'], 'releng-private')

+         self.assertEqual(data["target_dir"], "releng-private")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 3).one()
@@ -612,110 +738,155 @@ 

          self.assertEqual(c.target_dir, "/tmp/private")

  

      def test_submit_build_module_defaults_url(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26',

-                             'module_defaults_url': 'git://localhost.tld/x.git',

-                             'module_defaults_commit': 'master'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "tag",

+                             "source": "f26",

+                             "module_defaults_url": "git://localhost.tld/x.git",

+                             "module_defaults_commit": "master",

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['module_defaults_url'], 'git://localhost.tld/x.git master')

+         self.assertEqual(

+             data["module_defaults_url"], "git://localhost.tld/x.git master"

+         )

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_module_defaults_url_no_branch(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26',

-                             'module_defaults_url': 'git://localhost.tld/x.git'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "tag",

+                             "source": "f26",

+                             "module_defaults_url": "git://localhost.tld/x.git",

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

-             self.assertEqual(data['status'], 400)

-             self.assertEqual(data['error'], 'Bad Request')

-             self.assertEqual(data['message'],

-                              'The "module_defaults_url" and "module_defaults_commit" '

-                              'must be used together.')

+             self.assertEqual(data["status"], 400)

+             self.assertEqual(data["error"], "Bad Request")

+             self.assertEqual(

+                 data["message"],

+                 'The "module_defaults_url" and "module_defaults_commit" '

+                 "must be used together.",

+             )

  

      def test_submit_build_duplicate_sources(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'foo:x foo:x foo:y'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "foo:x foo:x foo:y"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['source'].count("foo:x"), 1)

-         self.assertEqual(data['source'].count("foo:y"), 1)

+         self.assertEqual(data["source"].count("foo:x"), 1)

+         self.assertEqual(data["source"].count("foo:y"), 1)

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_submit_build_extra_builds(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'f26', 'packages': ['ed'],

-                             'builds': ['foo-1-1', 'bar-1-1']}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "tag",

+                             "source": "f26",

+                             "packages": ["ed"],

+                             "builds": ["foo-1-1", "bar-1-1"],

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['builds'], 'foo-1-1 bar-1-1')

+         self.assertEqual(data["builds"], "foo-1-1 bar-1-1")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

-         self.assertEqual(c.builds, 'foo-1-1 bar-1-1')

+         self.assertEqual(c.builds, "foo-1-1 bar-1-1")

  

      def test_submit_build_source_type_build(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'build', 'packages': ['ed'],

-                             'builds': ['foo-1-1', 'bar-1-1']}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "build",

+                             "packages": ["ed"],

+                             "builds": ["foo-1-1", "bar-1-1"],

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['builds'], 'foo-1-1 bar-1-1')

+         self.assertEqual(data["builds"], "foo-1-1 bar-1-1")

  

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

          self.assertEqual(c.source_type, PungiSourceType.BUILD)

-         self.assertEqual(c.builds, 'foo-1-1 bar-1-1')

+         self.assertEqual(c.builds, "foo-1-1 bar-1-1")

  

      def test_submit_build_resurrection_removed(self):

          self.c1.state = COMPOSE_STATES["removed"]

          self.c1.reused_id = 1

          db.session.commit()

  

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/1')

+             rv = self.client.patch("/api/1/composes/1")

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['id'], 3)

-         self.assertEqual(data['state_name'], 'wait')

-         self.assertEqual(data['source'], 'testmodule:master')

-         self.assertEqual(data['time_removed'], None)

+         self.assertEqual(data["id"], 3)

+         self.assertEqual(data["state_name"], "wait")

+         self.assertEqual(data["source"], "testmodule:master")

+         self.assertEqual(data["time_removed"], None)

  

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.reused_id, None)
@@ -727,17 +898,18 @@ 

          self.c1.source = "pungi_cfg#hash"

          db.session.commit()

  

-         with self.test_request_context(user='dev4'):

+         with self.test_request_context(user="dev4"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/1')

+             rv = self.client.patch("/api/1/composes/1")

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

              data["message"],

-             "User dev4 not allowed to operate with compose with raw_config_keys=pungi_cfg.")

+             "User dev4 not allowed to operate with compose with raw_config_keys=pungi_cfg.",

+         )

  

      def test_submit_build_resurrection_allowed_raw_config_key(self):

          self.c1.state = COMPOSE_STATES["removed"]
@@ -746,79 +918,80 @@ 

          self.c1.source = "pungi_cfg2#hash"

          db.session.commit()

  

-         with self.test_request_context(user='dev4'):

+         with self.test_request_context(user="dev4"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/1')

+             rv = self.client.patch("/api/1/composes/1")

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['id'], 3)

-         self.assertEqual(data['state_name'], 'wait')

+         self.assertEqual(data["id"], 3)

+         self.assertEqual(data["state_name"], "wait")

  

      def test_submit_build_resurrection_failed(self):

          self.c1.state = COMPOSE_STATES["failed"]

          self.c1.reused_id = 1

          db.session.commit()

  

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/1')

+             rv = self.client.patch("/api/1/composes/1")

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['id'], 3)

-         self.assertEqual(data['state_name'], 'wait')

-         self.assertEqual(data['source'], 'testmodule:master')

-         self.assertEqual(data['time_removed'], None)

+         self.assertEqual(data["id"], 3)

+         self.assertEqual(data["state_name"], "wait")

+         self.assertEqual(data["source"], "testmodule:master")

+         self.assertEqual(data["time_removed"], None)

  

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.reused_id, None)

  

      def test_submit_build_resurrection_no_removed(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/1')

+             rv = self.client.patch("/api/1/composes/1")

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['message'], 'No compose with id 1 found')

+         self.assertEqual(data["message"], "No compose with id 1 found")

  

      def test_submit_build_resurrection_not_found(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/100')

+             rv = self.client.patch("/api/1/composes/100")

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['message'], 'No compose with id 100 found')

+         self.assertEqual(data["message"], "No compose with id 100 found")

  

      def test_submit_build_resurrection_removed_new_sigkeys(self):

          self.c1.state = COMPOSE_STATES["removed"]

          self.c1.reused_id = 1

          db.session.commit()

  

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/1', data=json.dumps(

-                 {"sigkeys": ["123", "456"]}))

+             rv = self.client.patch(

+                 "/api/1/composes/1", data=json.dumps({"sigkeys": ["123", "456"]})

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['id'], 3)

-         self.assertEqual(data['state_name'], 'wait')

-         self.assertEqual(data['source'], 'testmodule:master')

-         self.assertEqual(data['sigkeys'], '123 456')

-         self.assertEqual(data['time_removed'], None)

+         self.assertEqual(data["id"], 3)

+         self.assertEqual(data["state_name"], "wait")

+         self.assertEqual(data["source"], "testmodule:master")

+         self.assertEqual(data["sigkeys"], "123 456")

+         self.assertEqual(data["time_removed"], None)

  

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.reused_id, None)
@@ -829,272 +1002,348 @@ 

          self.c1.sigkeys = "012 345"

          db.session.commit()

  

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/1', data=json.dumps(

-                 {"sigkeys": ["123", "456"]}))

+             rv = self.client.patch(

+                 "/api/1/composes/1", data=json.dumps({"sigkeys": ["123", "456"]})

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(data['id'], 3)

-         self.assertEqual(data['state_name'], 'wait')

-         self.assertEqual(data['source'], 'testmodule:master')

-         self.assertEqual(data['sigkeys'], '123 456')

-         self.assertEqual(data['time_removed'], None)

+         self.assertEqual(data["id"], 3)

+         self.assertEqual(data["state_name"], "wait")

+         self.assertEqual(data["source"], "testmodule:master")

+         self.assertEqual(data["sigkeys"], "123 456")

+         self.assertEqual(data["time_removed"], None)

  

          c = db.session.query(Compose).filter(Compose.id == 3).one()

          self.assertEqual(c.reused_id, None)

  

      def test_submit_build_not_allowed_source_type(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'repo', 'source': '/path'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps({"source": {"type": "repo", "source": "/path"}}),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

-             data['message'],

-             'User dev not allowed to operate with compose with source_types=repo.')

+             data["message"],

+             "User dev not allowed to operate with compose with source_types=repo.",

+         )

  

-     @patch.object(odcs.server.config.Config, 'raw_config_urls',

-                   new={"pungi_cfg": "http://localhost/pungi.conf#%s"})

+     @patch.object(

+         odcs.server.config.Config,

+         "raw_config_urls",

+         new={"pungi_cfg": "http://localhost/pungi.conf#%s"},

+     )

      def test_submit_build_not_allowed_compose_type(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config', 'source': 'pungi_cfg#hash'},

-                  'compose_type': 'production'}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "raw_config", "source": "pungi_cfg#hash"},

+                         "compose_type": "production",

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

-             data['message'],

-             'User dev2 not allowed to operate with compose with compose_types=production.')

+             data["message"],

+             "User dev2 not allowed to operate with compose with compose_types=production.",

+         )

  

-     @patch.object(odcs.server.config.Config, 'raw_config_urls',

-                   new={"pungi_cfg": "http://localhost/pungi.conf#%s"})

+     @patch.object(

+         odcs.server.config.Config,

+         "raw_config_urls",

+         new={"pungi_cfg": "http://localhost/pungi.conf#%s"},

+     )

      def test_submit_build_not_allowed_raw_config_key(self):

-         with self.test_request_context(user='dev4'):

+         with self.test_request_context(user="dev4"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config', 'source': 'pungi_cfg#hash'},

-                  'compose_type': 'production'}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "raw_config", "source": "pungi_cfg#hash"},

+                         "compose_type": "production",

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

-             data['message'],

-             'User dev4 not allowed to operate with compose with raw_config_keys=pungi_cfg.')

+             data["message"],

+             "User dev4 not allowed to operate with compose with raw_config_keys=pungi_cfg.",

+         )

  

-     @patch.object(odcs.server.config.Config, 'raw_config_urls',

-                   new={"pungi_cfg2": "http://localhost/pungi.conf#%s"})

+     @patch.object(

+         odcs.server.config.Config,

+         "raw_config_urls",

+         new={"pungi_cfg2": "http://localhost/pungi.conf#%s"},

+     )

      def test_submit_build_allowed_raw_config_key(self):

-         with self.test_request_context(user='dev4'):

+         with self.test_request_context(user="dev4"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config', 'source': 'pungi_cfg2#hash'},

-                  'compose_type': 'production'}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "raw_config", "source": "pungi_cfg2#hash"},

+                         "compose_type": "production",

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["source"], "pungi_cfg2#hash")

  

      def test_submit_build_unknown_source_type(self):

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'unknown', 'source': '/path'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps({"source": {"type": "unknown", "source": "/path"}}),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual(

-             data['message'], 'Unknown source type "unknown"')

+         self.assertEqual(data["message"], 'Unknown source type "unknown"')

  

      def test_submit_module_build_wrong_source(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master x'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:master x"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

-             data["message"], 'Module definition must be in "n:s", "n:s:v" or '

-             '"n:s:v:c" format, but got x')

+             data["message"],

+             'Module definition must be in "n:s", "n:s:v" or '

+             '"n:s:v:c" format, but got x',

+         )

  

      def test_submit_module_build_base_module_in_source(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master platform:x'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "module",

+                             "source": "testmodule:master platform:x",

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

-             data["message"], 'ODCS currently cannot create compose with base '

-             'modules, but platform was requested.')

+             data["message"],

+             "ODCS currently cannot create compose with base "

+             "modules, but platform was requested.",

+         )

  

      def test_submit_build_per_user_source_type_allowed(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:master"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["state_name"], "wait")

  

      def test_submit_build_per_user_source_type_not_allowed(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': '/path',

-                             'packages': ['foo']}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "tag", "source": "/path", "packages": ["foo"]}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

-             data['message'],

-             'User dev2 not allowed to operate with compose with source_types=tag.')

+             data["message"],

+             "User dev2 not allowed to operate with compose with source_types=tag.",

+         )

  

      def test_submit_build_per_group_source_type_allowed(self):

-         with self.test_request_context(user="unknown", groups=['dev2', "x"]):

+         with self.test_request_context(user="unknown", groups=["dev2", "x"]):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:master"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["state_name"], "wait")

  

      def test_submit_build_per_group_source_type_not_allowed(self):

-         with self.test_request_context(user="unknown", groups=['dev2', "x"]):

+         with self.test_request_context(user="unknown", groups=["dev2", "x"]):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': '/path',

-                             'packages': ['foo']}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "tag", "source": "/path", "packages": ["foo"]}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

-             data['message'],

-             'User unknown not allowed to operate with compose with source_types=tag.')

+             data["message"],

+             "User unknown not allowed to operate with compose with source_types=tag.",

+         )
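
The four tests above pin down how per-user and per-group permissions combine for source types: a request is allowed when the requesting user's own configuration permits the source type or, failing that, when any of the user's groups does; a later test (test_can_create_compose_with_permission_overriden_by_username) shows that a per-user entry overrides group entries. A minimal sketch of that lookup, with an illustrative helper name and "allowed" mapping:

    def allowed_source_type(user, groups, source_type, allowed):
        # A per-user entry, when present, takes precedence over groups.
        if user in allowed:
            return source_type in allowed[user]
        # Otherwise any group allowing the source type is enough.
        return any(source_type in allowed.get(group, ()) for group in groups)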

  

      def test_query_compose(self):

-         resp = self.client.get('/api/1/composes/1')

+         resp = self.client.get("/api/1/composes/1")

          data = json.loads(resp.get_data(as_text=True))

-         self.assertEqual(data['id'], 1)

-         self.assertEqual(data['source'], "testmodule:master")

-         self.assertEqual(data['pungi_config_dump'], None)

+         self.assertEqual(data["id"], 1)

+         self.assertEqual(data["source"], "testmodule:master")

+         self.assertEqual(data["pungi_config_dump"], None)

  

      def test_query_composes(self):

-         resp = self.client.get('/api/1/composes/')

-         evs = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/")

+         evs = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual(len(evs), 2)

  

      def test_query_compose_owner(self):

-         resp = self.client.get('/api/1/composes/?owner=me')

-         evs = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/?owner=me")

+         evs = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual(len(evs), 1)

-         self.assertEqual(evs[0]['source'], 'f26')

+         self.assertEqual(evs[0]["source"], "f26")

          self.assertTrue("pungi_config_dump" not in evs[0])

  

      def test_query_compose_state_done(self):

-         resp = self.client.get(

-             '/api/1/composes/?state=%d' % COMPOSE_STATES["done"])

-         evs = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/?state=%d" % COMPOSE_STATES["done"])

+         evs = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual(len(evs), 0)

  

      def test_query_compose_state_wait(self):

-         resp = self.client.get(

-             '/api/1/composes/?state=%d' % COMPOSE_STATES["wait"])

-         evs = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/?state=%d" % COMPOSE_STATES["wait"])

+         evs = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual(len(evs), 2)

  

      def test_query_compose_source_type(self):

          resp = self.client.get(

-             '/api/1/composes/?source_type=%d' % PungiSourceType.MODULE)

-         evs = json.loads(resp.get_data(as_text=True))['items']

+             "/api/1/composes/?source_type=%d" % PungiSourceType.MODULE

+         )

+         evs = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual(len(evs), 1)

  

      def test_query_compose_source(self):

-         resp = self.client.get(

-             '/api/1/composes/?source=f26')

-         evs = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/?source=f26")

+         evs = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual(len(evs), 1)

  

      def test_query_composes_order_by_default(self):

-         resp = self.client.get('/api/1/composes/')

-         composes = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/")

+         composes = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual([2, 1], [compose["id"] for compose in composes])

  

      def test_query_composes_order_by_id_asc(self):

-         resp = self.client.get('/api/1/composes/?order_by=id')

-         composes = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/?order_by=id")

+         composes = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual([1, 2], [compose["id"] for compose in composes])

  

      def test_query_composes_order_by_id_desc(self):

-         resp = self.client.get('/api/1/composes/?order_by=-id')

-         composes = json.loads(resp.get_data(as_text=True))['items']

+         resp = self.client.get("/api/1/composes/?order_by=-id")

+         composes = json.loads(resp.get_data(as_text=True))["items"]

          self.assertEqual([2, 1], [compose["id"] for compose in composes])

  

      def test_query_composes_order_by_id_unknown_key(self):

-         resp = self.client.get('/api/1/composes/?order_by=foo')

+         resp = self.client.get("/api/1/composes/?order_by=foo")

          data = json.loads(resp.get_data(as_text=True))

-         self.assertEqual(data['status'], 400)

-         self.assertEqual(data['error'], 'Bad Request')

-         self.assertTrue(data['message'].startswith(

-             "An invalid order_by key was suplied, allowed keys are"))

+         self.assertEqual(data["status"], 400)

+         self.assertEqual(data["error"], "Bad Request")

+         self.assertTrue(

+             data["message"].startswith(

+                 "An invalid order_by key was suplied, allowed keys are"

+             )

+         )
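
The order_by tests above fix a small query convention: a bare key sorts ascending, a leading "-" sorts descending (newest first being the default), and unknown keys produce HTTP 400. A sketch of that convention on top of SQLAlchemy, with an illustrative allowed-key set:

    from sqlalchemy import asc, desc

    ALLOWED_ORDER_BY = {"id", "time_submitted"}

    def apply_order_by(query, model, order_by="-id"):
        direction = desc if order_by.startswith("-") else asc
        key = order_by.lstrip("-")
        if key not in ALLOWED_ORDER_BY:
            # The real view turns this into the 400 response asserted above.
            raise ValueError("Invalid order_by key: %s" % key)
        return query.order_by(direction(getattr(model, key)))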

  

      def test_delete_compose(self):

          with freeze_time(self.initial_datetime) as frozen_datetime:

              c3 = Compose.create(

-                 db.session, "unknown", PungiSourceType.MODULE, "testmodule:master",

-                 COMPOSE_RESULTS["repository"], 60)

-             c3.state = COMPOSE_STATES['done']

+                 db.session,

+                 "unknown",

+                 PungiSourceType.MODULE,

+                 "testmodule:master",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

+             c3.state = COMPOSE_STATES["done"]

              db.session.add(c3)

              db.session.commit()

  

              self.assertEqual(len(Compose.composes_to_expire()), 0)

  

-             with self.test_request_context(user='root'):

+             with self.test_request_context(user="root"):

                  flask.g.oidc_scopes = [

-                     '{0}{1}'.format(conf.oidc_base_namespace, 'delete-compose')

+                     "{0}{1}".format(conf.oidc_base_namespace, "delete-compose")

                  ]

  

                  resp = self.client.delete("/api/1/composes/%s" % c3.id)

                  data = json.loads(resp.get_data(as_text=True))

  

-             self.assertEqual(resp.status, '202 ACCEPTED')

+             self.assertEqual(resp.status, "202 ACCEPTED")

  

-             self.assertEqual(data['status'], 202)

-             self.assertEqual(data['message'],

-                              "The delete request for compose (id=%s) has been accepted and will be processed by backend later." % c3.id)

+             self.assertEqual(data["status"], 202)

+             self.assertEqual(

+                 data["message"],

+                 "The delete request for compose (id=%s) has been accepted and will be processed by backend later."

+                 % c3.id,

+             )

  

              self.assertEqual(c3.time_to_expire, self.initial_datetime)

  
@@ -1102,164 +1351,217 @@ 

              self.assertEqual(len(Compose.composes_to_expire()), 1)

              expired_compose = Compose.composes_to_expire().pop()

              self.assertEqual(expired_compose.id, c3.id)

-             self.assertEqual(expired_compose.removed_by, 'root')

+             self.assertEqual(expired_compose.removed_by, "root")

  

      def test_delete_not_allowed_states_compose(self):

          for state in COMPOSE_STATES.keys():

-             if state not in ['wait', 'done', 'failed']:

+             if state not in ["wait", "done", "failed"]:

                  new_c = Compose.create(

-                     db.session, "unknown", PungiSourceType.MODULE, "testmodule:master",

-                     COMPOSE_RESULTS["repository"], 60)

+                     db.session,

+                     "unknown",

+                     PungiSourceType.MODULE,

+                     "testmodule:master",

+                     COMPOSE_RESULTS["repository"],

+                     60,

+                 )

                  new_c.state = COMPOSE_STATES[state]

                  db.session.add(new_c)

                  db.session.commit()

                  compose_id = new_c.id

  

-                 with self.test_request_context(user='root'):

+                 with self.test_request_context(user="root"):

                      flask.g.oidc_scopes = [

-                         '{0}{1}'.format(conf.oidc_base_namespace, 'delete-compose')

+                         "{0}{1}".format(conf.oidc_base_namespace, "delete-compose")

                      ]

  

                      resp = self.client.delete("/api/1/composes/%s" % compose_id)

                      data = json.loads(resp.get_data(as_text=True))

  

-                 self.assertEqual(resp.status, '400 BAD REQUEST')

-                 self.assertEqual(data['status'], 400)

-                 six.assertRegex(self, data['message'],

-                                 r"Compose \(id=%s\) can not be removed, its state need to be in .*." % new_c.id)

-                 self.assertEqual(data['error'], 'Bad Request')

+                 self.assertEqual(resp.status, "400 BAD REQUEST")

+                 self.assertEqual(data["status"], 400)

+                 six.assertRegex(

+                     self,

+                     data["message"],

+                     r"Compose \(id=%s\) can not be removed, its state need to be in .*."

+                     % new_c.id,

+                 )

+                 self.assertEqual(data["error"], "Bad Request")

  

      def test_delete_non_exist_compose(self):

-         with self.test_request_context(user='root'):

+         with self.test_request_context(user="root"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'delete-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "delete-compose")

              ]

  

              resp = self.client.delete("/api/1/composes/999999")

              data = json.loads(resp.get_data(as_text=True))

  

-         self.assertEqual(resp.status, '404 NOT FOUND')

-         self.assertEqual(data['status'], 404)

-         self.assertEqual(data['message'], "No such compose found.")

-         self.assertEqual(data['error'], 'Not Found')

+         self.assertEqual(resp.status, "404 NOT FOUND")

+         self.assertEqual(data["status"], 404)

+         self.assertEqual(data["message"], "No such compose found.")

+         self.assertEqual(data["error"], "Not Found")

  

      def test_delete_compose_with_non_admin_user(self):

          self.c1.state = COMPOSE_STATES["failed"]

          db.session.commit()

  

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'delete-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "delete-compose")

              ]

  

              resp = self.client.delete("/api/1/composes/%s" % self.c1.id)

              data = json.loads(resp.get_data(as_text=True))

  

-         self.assertEqual(resp.status, '403 FORBIDDEN')

+         self.assertEqual(resp.status, "403 FORBIDDEN")

          self.assertEqual(resp.status_code, 403)

-         self.assertEqual(data['status'], 403)

-         self.assertEqual(data['message'], 'User dev is not in role admins.')

+         self.assertEqual(data["status"], 403)

+         self.assertEqual(data["message"], "User dev is not in role admins.")

  

      def test_can_not_create_compose_with_non_composer_user(self):

-         with self.test_request_context(user='qa'):

+         with self.test_request_context(user="qa"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             resp = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

+             resp = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:master"}}

+                 ),

+             )

              data = json.loads(resp.get_data(as_text=True))

  

-         self.assertEqual(resp.status, '403 FORBIDDEN')

+         self.assertEqual(resp.status, "403 FORBIDDEN")

          self.assertEqual(resp.status_code, 403)

-         self.assertEqual(data['status'], 403)

-         self.assertEqual(data['message'], 'User qa is not in role allowed_clients.')

+         self.assertEqual(data["status"], 403)

+         self.assertEqual(data["message"], "User qa is not in role allowed_clients.")

  

      def test_can_create_compose_with_user_in_configured_groups(self):

-         with self.test_request_context(user='another_user', groups=['composer']):

+         with self.test_request_context(user="another_user", groups=["composer"]):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             resp = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:rawhide'}}))

+             resp = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:rawhide"}}

+                 ),

+             )

          db.session.expire_all()

  

-         self.assertEqual(resp.status, '200 OK')

+         self.assertEqual(resp.status, "200 OK")

          self.assertEqual(resp.status_code, 200)

-         c = db.session.query(Compose).filter(Compose.source == 'testmodule:rawhide').one()

+         c = (

+             db.session.query(Compose)

+             .filter(Compose.source == "testmodule:rawhide")

+             .one()

+         )

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_can_create_compose_with_user_in_multiple_groups(self):

-         with self.test_request_context(user='another_user', groups=['dev3', 'dev2']):

+         with self.test_request_context(user="another_user", groups=["dev3", "dev2"]):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             resp = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:rawhide'}}))

+             resp = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:rawhide"}}

+                 ),

+             )

          db.session.expire_all()

  

-         self.assertEqual(resp.status, '200 OK')

+         self.assertEqual(resp.status, "200 OK")

          self.assertEqual(resp.status_code, 200)

-         c = db.session.query(Compose).filter(Compose.source == 'testmodule:rawhide').one()

+         c = (

+             db.session.query(Compose)

+             .filter(Compose.source == "testmodule:rawhide")

+             .one()

+         )

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

  

      def test_cannot_create_compose_with_user_in_multiple_groups(self):

-         with self.test_request_context(user='another_user', groups=['dev3', 'dev2']):

+         with self.test_request_context(user="another_user", groups=["dev3", "dev2"]):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             resp = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'tag', 'source': 'testmodule:rawhide'}}))

+             resp = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "tag", "source": "testmodule:rawhide"}}

+                 ),

+             )

              data = json.loads(resp.get_data(as_text=True))

          db.session.expire_all()

  

-         self.assertEqual(resp.status, '403 FORBIDDEN')

+         self.assertEqual(resp.status, "403 FORBIDDEN")

          self.assertEqual(resp.status_code, 403)

          self.assertEqual(

-             data['message'],

-             'User another_user not allowed to operate with compose with source_types=tag.')

+             data["message"],

+             "User another_user not allowed to operate with compose with source_types=tag.",

+         )

  

      def test_can_delete_compose_with_user_in_configured_groups(self):

          c3 = Compose.create(

-             db.session, "unknown", PungiSourceType.MODULE, "testmodule:testbranch",

-             COMPOSE_RESULTS["repository"], 60)

-         c3.state = COMPOSE_STATES['done']

+             db.session,

+             "unknown",

+             PungiSourceType.MODULE,

+             "testmodule:testbranch",

+             COMPOSE_RESULTS["repository"],

+             60,

+         )

+         c3.state = COMPOSE_STATES["done"]

          db.session.add(c3)

          db.session.commit()

  

-         with self.test_request_context(user='another_admin', groups=['admin']):

+         with self.test_request_context(user="another_admin", groups=["admin"]):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'delete-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "delete-compose")

              ]

  

              resp = self.client.delete("/api/1/composes/%s" % c3.id)

              data = json.loads(resp.get_data(as_text=True))

  

-         self.assertEqual(resp.status, '202 ACCEPTED')

+         self.assertEqual(resp.status, "202 ACCEPTED")

          self.assertEqual(resp.status_code, 202)

-         self.assertEqual(data['status'], 202)

-         six.assertRegex(self, data['message'],

-                         r"The delete request for compose \(id=%s\) has been accepted and will be processed by backend later." % c3.id)

+         self.assertEqual(data["status"], 202)

+         six.assertRegex(

+             self,

+             data["message"],

+             r"The delete request for compose \(id=%s\) has been accepted and will be processed by backend later."

+             % c3.id,

+         )

  

      def test_can_create_compose_with_permission_overriden_by_username(self):

-         with self.test_request_context(user='dev3', groups=['dev2']):

+         with self.test_request_context(user="dev3", groups=["dev2"]):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             resp = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:rawhide'}}))

+             resp = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:rawhide"}}

+                 ),

+             )

          db.session.expire_all()

  

-         self.assertEqual(resp.status, '403 FORBIDDEN')

+         self.assertEqual(resp.status, "403 FORBIDDEN")

          self.assertEqual(resp.status_code, 403)

  

-     @patch.object(odcs.server.config.Config, 'max_seconds_to_live', new_callable=PropertyMock)

-     @patch.object(odcs.server.config.Config, 'seconds_to_live', new_callable=PropertyMock)

-     def test_use_seconds_to_live_in_request(self, mock_seconds_to_live, mock_max_seconds_to_live):

+     @patch.object(

+         odcs.server.config.Config, "max_seconds_to_live", new_callable=PropertyMock

+     )

+     @patch.object(

+         odcs.server.config.Config, "seconds_to_live", new_callable=PropertyMock

+     )

+     def test_use_seconds_to_live_in_request(

+         self, mock_seconds_to_live, mock_max_seconds_to_live

+     ):

          # Test that seconds-to-live is still supported to keep backward compatibility.

          for seconds_to_live in ["seconds-to-live", "seconds_to_live"]:

              # if we have 'seconds_to_live' in request < conf.max_seconds_to_live
@@ -1267,103 +1569,149 @@ 

              mock_seconds_to_live.return_value = 60 * 60 * 24

              mock_max_seconds_to_live.return_value = 60 * 60 * 24 * 3

  

-             with self.test_request_context(user='dev'):

+             with self.test_request_context(user="dev"):

                  flask.g.oidc_scopes = [

-                     '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                     "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

                  ]

  

-                 rv = self.client.post('/api/1/composes/', data=json.dumps(

-                     {'source': {'type': 'module', 'source': 'testmodule:master'},

-                      'seconds_to_live': 60 * 60 * 12}))

+                 rv = self.client.post(

+                     "/api/1/composes/",

+                     data=json.dumps(

+                         {

+                             "source": {"type": "module", "source": "testmodule:master"},

+                             "seconds_to_live": 60 * 60 * 12,

+                         }

+                     ),

+                 )

                  data = json.loads(rv.get_data(as_text=True))

  

-             time_submitted = datetime.strptime(data['time_submitted'], "%Y-%m-%dT%H:%M:%SZ")

-             time_to_expire = datetime.strptime(data['time_to_expire'], "%Y-%m-%dT%H:%M:%SZ")

+             time_submitted = datetime.strptime(

+                 data["time_submitted"], "%Y-%m-%dT%H:%M:%SZ"

+             )

+             time_to_expire = datetime.strptime(

+                 data["time_to_expire"], "%Y-%m-%dT%H:%M:%SZ"

+             )

              delta = timedelta(hours=12)

              self.assertEqual(time_to_expire - time_submitted, delta)

  

-     @patch.object(odcs.server.config.Config, 'max_seconds_to_live', new_callable=PropertyMock)

-     @patch.object(odcs.server.config.Config, 'seconds_to_live', new_callable=PropertyMock)

-     def test_use_max_seconds_to_live_in_conf(self, mock_seconds_to_live, mock_max_seconds_to_live):

+     @patch.object(

+         odcs.server.config.Config, "max_seconds_to_live", new_callable=PropertyMock

+     )

+     @patch.object(

+         odcs.server.config.Config, "seconds_to_live", new_callable=PropertyMock

+     )

+     def test_use_max_seconds_to_live_in_conf(

+         self, mock_seconds_to_live, mock_max_seconds_to_live

+     ):

          # if we have 'seconds_to_live' in request > conf.max_seconds_to_live

          # conf.max_seconds_to_live will be used

          mock_seconds_to_live.return_value = 60 * 60 * 24

          mock_max_seconds_to_live.return_value = 60 * 60 * 24 * 3

  

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master'}, 'seconds_to_live': 60 * 60 * 24 * 7}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "module", "source": "testmodule:master"},

+                         "seconds_to_live": 60 * 60 * 24 * 7,

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         time_submitted = datetime.strptime(data['time_submitted'], "%Y-%m-%dT%H:%M:%SZ")

-         time_to_expire = datetime.strptime(data['time_to_expire'], "%Y-%m-%dT%H:%M:%SZ")

+         time_submitted = datetime.strptime(data["time_submitted"], "%Y-%m-%dT%H:%M:%SZ")

+         time_to_expire = datetime.strptime(data["time_to_expire"], "%Y-%m-%dT%H:%M:%SZ")

          delta = timedelta(days=3)

          self.assertEqual(time_to_expire - time_submitted, delta)

  

-     @patch.object(odcs.server.config.Config, 'max_seconds_to_live', new_callable=PropertyMock)

-     @patch.object(odcs.server.config.Config, 'seconds_to_live', new_callable=PropertyMock)

-     def test_use_seconds_to_live_in_conf(self, mock_seconds_to_live, mock_max_seconds_to_live):

+     @patch.object(

+         odcs.server.config.Config, "max_seconds_to_live", new_callable=PropertyMock

+     )

+     @patch.object(

+         odcs.server.config.Config, "seconds_to_live", new_callable=PropertyMock

+     )

+     def test_use_seconds_to_live_in_conf(

+         self, mock_seconds_to_live, mock_max_seconds_to_live

+     ):

          # if we don't have 'seconds_to_live' in request, conf.seconds_to_live will be used

          mock_seconds_to_live.return_value = 60 * 60 * 24

          mock_max_seconds_to_live.return_value = 60 * 60 * 24 * 3

  

-         with self.test_request_context(user='dev'):

+         with self.test_request_context(user="dev"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:master"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         time_submitted = datetime.strptime(data['time_submitted'], "%Y-%m-%dT%H:%M:%SZ")

-         time_to_expire = datetime.strptime(data['time_to_expire'], "%Y-%m-%dT%H:%M:%SZ")

+         time_submitted = datetime.strptime(data["time_submitted"], "%Y-%m-%dT%H:%M:%SZ")

+         time_to_expire = datetime.strptime(data["time_to_expire"], "%Y-%m-%dT%H:%M:%SZ")

          delta = timedelta(hours=24)

          self.assertEqual(time_to_expire - time_submitted, delta)
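
Taken together, the three tests above describe the expiration policy: a seconds_to_live supplied in the request wins while it stays below conf.max_seconds_to_live, larger values are clamped to that maximum, and conf.seconds_to_live is the default when the request omits the field; the dashed "seconds-to-live" spelling is accepted for backward compatibility. A minimal sketch of that policy (helper name illustrative):

    def resolve_seconds_to_live(request_data, conf):
        # The dashed spelling is kept for backward compatibility.
        requested = request_data.get(
            "seconds_to_live", request_data.get("seconds-to-live")
        )
        if requested is None:
            return conf.seconds_to_live
        # Values above the configured ceiling are clamped, not rejected.
        return min(requested, conf.max_seconds_to_live)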

  

-     @patch.object(odcs.server.config.Config, 'auth_backend', new_callable=PropertyMock)

-     def test_anonymous_user_can_submit_build_with_noauth_backend(self, mock_auth_backend):

-         mock_auth_backend.return_value = 'noauth'

+     @patch.object(odcs.server.config.Config, "auth_backend", new_callable=PropertyMock)

+     def test_anonymous_user_can_submit_build_with_noauth_backend(

+         self, mock_auth_backend

+     ):

+         mock_auth_backend.return_value = "noauth"

  

          with self.test_request_context():

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'module', 'source': 'testmodule:master'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "module", "source": "testmodule:master"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         expected_json = {'source_type': 2, 'state': 0, 'time_done': None,

-                          'state_name': 'wait',

-                          'state_reason': None,

-                          'source': u'testmodule:master',

-                          'owner': u'unknown',

-                          'result_repo': 'http://localhost/odcs/odcs-%d/compose/Temporary' % data['id'],

-                          'result_repofile': 'http://localhost/odcs/odcs-%d/compose/Temporary/odcs-%d.repo' % (data['id'], data['id']),

-                          'time_submitted': data["time_submitted"], 'id': data['id'],

-                          'time_started': None,

-                          'time_removed': None,

-                          'removed_by': None,

-                          'time_to_expire': data["time_to_expire"],

-                          'flags': [],

-                          'results': ['repository'],

-                          'sigkeys': '',

-                          'koji_event': None,

-                          'koji_task_id': None,

-                          'packages': None,

-                          'builds': None,

-                          'arches': 'x86_64',

-                          'multilib_arches': '',

-                          'multilib_method': 0,

-                          'lookaside_repos': '',

-                          'modular_koji_tags': None,

-                          'module_defaults_url': None,

-                          'label': None,

-                          'compose_type': 'test',

-                          'pungi_compose_id': None,

-                          'target_dir': 'default',

-                          'toplevel_url': 'http://localhost/odcs/odcs-%d' % data['id']}

+         expected_json = {

+             "source_type": 2,

+             "state": 0,

+             "time_done": None,

+             "state_name": "wait",

+             "state_reason": None,

+             "source": u"testmodule:master",

+             "owner": u"unknown",

+             "result_repo": "http://localhost/odcs/odcs-%d/compose/Temporary"

+             % data["id"],

+             "result_repofile": "http://localhost/odcs/odcs-%d/compose/Temporary/odcs-%d.repo"

+             % (data["id"], data["id"]),

+             "time_submitted": data["time_submitted"],

+             "id": data["id"],

+             "time_started": None,

+             "time_removed": None,

+             "removed_by": None,

+             "time_to_expire": data["time_to_expire"],

+             "flags": [],

+             "results": ["repository"],

+             "sigkeys": "",

+             "koji_event": None,

+             "koji_task_id": None,

+             "packages": None,

+             "builds": None,

+             "arches": "x86_64",

+             "multilib_arches": "",

+             "multilib_method": 0,

+             "lookaside_repos": "",

+             "modular_koji_tags": None,

+             "module_defaults_url": None,

+             "label": None,

+             "compose_type": "test",

+             "pungi_compose_id": None,

+             "target_dir": "default",

+             "toplevel_url": "http://localhost/odcs/odcs-%d" % data["id"],

+         }

          self.assertEqual(data, expected_json)

  

          db.session.expire_all()
@@ -1376,8 +1724,9 @@ 

  

      def setUp(self):

          super(TestExtendExpiration, self).setUp()

-         self.oidc_base_namespace = patch.object(conf, 'oidc_base_namespace',

-                                                 new='http://example.com/')

+         self.oidc_base_namespace = patch.object(

+             conf, "oidc_base_namespace", new="http://example.com/"

+         )

          self.oidc_base_namespace.start()

  

      def tearDown(self):
@@ -1385,21 +1734,42 @@ 

          super(TestExtendExpiration, self).tearDown()

  

      def setup_test_data(self):

-         self.initial_datetime = datetime(year=2016, month=1, day=1,

-                                          hour=0, minute=0, second=0)

+         self.initial_datetime = datetime(

+             year=2016, month=1, day=1, hour=0, minute=0, second=0

+         )

          with freeze_time(self.initial_datetime):

              self.c1 = Compose.create(

-                 db.session, "me", PungiSourceType.KOJI_TAG, "f25",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "me",

+                 PungiSourceType.KOJI_TAG,

+                 "f25",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

              self.c2 = Compose.create(

-                 db.session, "me", PungiSourceType.KOJI_TAG, "f26",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "me",

+                 PungiSourceType.KOJI_TAG,

+                 "f26",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

              self.c3 = Compose.create(

-                 db.session, "me", PungiSourceType.KOJI_TAG, "f27",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "me",

+                 PungiSourceType.KOJI_TAG,

+                 "f27",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

              self.c4 = Compose.create(

-                 db.session, "me", PungiSourceType.KOJI_TAG, "master",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "me",

+                 PungiSourceType.KOJI_TAG,

+                 "master",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

  

              map(db.session.add, (self.c1, self.c2, self.c3, self.c4))

              db.session.commit()
@@ -1412,91 +1782,90 @@ 

              self.c1_id = self.c1.id

              self.c3_id = self.c3.id

  

-     @patch.object(conf, 'auth_backend', new='noauth')

+     @patch.object(conf, "auth_backend", new="noauth")

      def test_bad_request_if_seconds_to_live_is_invalid(self):

-         post_data = json.dumps({

-             'seconds_to_live': '600s'

-         })

+         post_data = json.dumps({"seconds_to_live": "600s"})

          with self.test_request_context():

-             rv = self.client.patch('/api/1/composes/{0}'.format(self.c1.id),

-                                    data=post_data)

+             rv = self.client.patch(

+                 "/api/1/composes/{0}".format(self.c1.id), data=post_data

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-             self.assertEqual(400, data['status'])

-             self.assertEqual('Bad Request', data['error'])

-             self.assertIn('Invalid seconds_to_live specified in request',

-                           data['message'])

+             self.assertEqual(400, data["status"])

+             self.assertEqual("Bad Request", data["error"])

+             self.assertIn(

+                 "Invalid seconds_to_live specified in request", data["message"]

+             )

  

-     @patch.object(conf, 'auth_backend', new='noauth')

+     @patch.object(conf, "auth_backend", new="noauth")

      def test_bad_request_if_request_data_is_not_json(self):

          with self.test_request_context():

-             rv = self.client.patch('/api/1/composes/{0}'.format(self.c1.id),

-                                    data='abc')

+             rv = self.client.patch("/api/1/composes/{0}".format(self.c1.id), data="abc")

              data = json.loads(rv.get_data(as_text=True))

  

-             self.assertEqual(400, data['status'])

-             self.assertEqual('Bad Request', data['error'])

-             self.assertIn('Failed to decode JSON object', data['message'])

+             self.assertEqual(400, data["status"])

+             self.assertEqual("Bad Request", data["error"])

+             self.assertIn("Failed to decode JSON object", data["message"])

  

-     @patch.object(conf, 'oidc_base_namespace', new='http://example.com/')

+     @patch.object(conf, "oidc_base_namespace", new="http://example.com/")

      def test_fail_if_extend_non_existing_compose(self):

-         post_data = json.dumps({

-             'seconds_to_live': 600

-         })

+         post_data = json.dumps({"seconds_to_live": 600})

          with self.test_request_context():

-             flask.g.oidc_scopes = ['http://example.com/new-compose',

-                                    'http://example.com/renew-compose']

+             flask.g.oidc_scopes = [

+                 "http://example.com/new-compose",

+                 "http://example.com/renew-compose",

+             ]

  

-             rv = self.client.patch('/api/1/composes/999', data=post_data)

+             rv = self.client.patch("/api/1/composes/999", data=post_data)

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual('No compose with id 999 found', data['message'])

+         self.assertEqual("No compose with id 999 found", data["message"])

  

      def test_fail_if_compose_is_not_done(self):

-         self.c1.state = COMPOSE_STATES['wait']

+         self.c1.state = COMPOSE_STATES["wait"]

          db.session.commit()

  

-         post_data = json.dumps({

-             'seconds_to_live': 600

-         })

+         post_data = json.dumps({"seconds_to_live": 600})

          with self.test_request_context():

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

  

-             rv = self.client.patch('/api/1/composes/{0}'.format(self.c1.id),

-                                    data=post_data)

+             rv = self.client.patch(

+                 "/api/1/composes/{0}".format(self.c1.id), data=post_data

+             )

              data = json.loads(rv.get_data(as_text=True))

  

-         self.assertEqual('No compose with id {0} found'.format(self.c1.id),

-                          data['message'])

+         self.assertEqual(

+             "No compose with id {0} found".format(self.c1.id), data["message"]

+         )

  

      def test_extend_compose_expiration(self):

          fake_utcnow = datetime.utcnow()

  

-         self.c2.state = COMPOSE_STATES['done']

+         self.c2.state = COMPOSE_STATES["done"]

          self.c2.time_to_expire = fake_utcnow - timedelta(seconds=10)

          db.session.commit()

  

          expected_seconds_to_live = 60 * 60 * 3

          expected_time_to_expire = fake_utcnow + timedelta(

-             seconds=expected_seconds_to_live)

-         post_data = json.dumps({

-             'seconds_to_live': expected_seconds_to_live

-         })

+             seconds=expected_seconds_to_live

+         )

+         post_data = json.dumps({"seconds_to_live": expected_seconds_to_live})

  

          with self.test_request_context():

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'renew-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "renew-compose")

              ]

              with freeze_time(fake_utcnow):

-                 url = '/api/1/composes/{0}'.format(self.c2.id)

+                 url = "/api/1/composes/{0}".format(self.c2.id)

                  rv = self.client.patch(url, data=post_data)

                  data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(

              Compose._utc_datetime_to_iso(expected_time_to_expire),

-             data['time_to_expire'])

+             data["time_to_expire"],

+         )

  

          # Compose reusing self.c2 and the one self.c2 reuses should also be

          # extended.
@@ -1511,8 +1880,9 @@ 

  

      def setUp(self):

          super(TestViewsRawConfig, self).setUp()

-         self.oidc_base_namespace = patch.object(conf, 'oidc_base_namespace',

-                                                 new='http://example.com/')

+         self.oidc_base_namespace = patch.object(

+             conf, "oidc_base_namespace", new="http://example.com/"

+         )

          self.oidc_base_namespace.start()

  

      def tearDown(self):
@@ -1520,125 +1890,158 @@ 

          self.oidc_base_namespace.stop()

  

      def test_submit_build_raw_config_too_many_sources(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config',

-                             'source': 'pungi_cfg#hash pungi2cfg_hash'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {

+                             "type": "raw_config",

+                             "source": "pungi_cfg#hash pungi2cfg_hash",

+                         }

+                     }

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["status"], 400)

          self.assertEqual(

              data["message"],

-             'Only single source is allowed for "raw_config" source_type')

+             'Only single source is allowed for "raw_config" source_type',

+         )

  

      def test_submit_build_raw_config_no_hash(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config',

-                             'source': 'pungi_cfg'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "raw_config", "source": "pungi_cfg"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["status"], 400)

          self.assertEqual(

              data["message"],

              'Source must be in "source_name#commit_hash" format for '

-             '"raw_config" source_type.')

+             '"raw_config" source_type.',

+         )

  

      def test_submit_build_raw_config_empty_hash(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config',

-                             'source': 'pungi_cfg#'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "raw_config", "source": "pungi_cfg#"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["status"], 400)

          self.assertEqual(

              data["message"],

              'Source must be in "source_name#commit_hash" format for '

-             '"raw_config" source_type.')

+             '"raw_config" source_type.',

+         )

  

      def test_submit_build_raw_config_empty_name(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config',

-                             'source': '#hash'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps({"source": {"type": "raw_config", "source": "#hash"}}),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["status"], 400)

          self.assertEqual(

              data["message"],

              'Source must be in "source_name#commit_hash" format for '

-             '"raw_config" source_type.')

+             '"raw_config" source_type.',

+         )

  

      def test_submit_build_raw_config_empty_name_hash(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config',

-                             'source': '#'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps({"source": {"type": "raw_config", "source": "#"}}),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["status"], 400)

          self.assertEqual(

              data["message"],

              'Source must be in "source_name#commit_hash" format for '

-             '"raw_config" source_type.')

+             '"raw_config" source_type.',

+         )

  

      def test_submit_build_raw_config_unknown_name(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             rv = self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config',

-                             'source': 'pungi_cfg#hash'}}))

+             rv = self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {"source": {"type": "raw_config", "source": "pungi_cfg#hash"}}

+                 ),

+             )

              data = json.loads(rv.get_data(as_text=True))

  

          self.assertEqual(data["status"], 400)

          self.assertEqual(

              data["message"],

-             'Source "pungi_cfg" does not exist in server configuration.')

+             'Source "pungi_cfg" does not exist in server configuration.',

+         )

  

-     @patch.object(odcs.server.config.Config, 'raw_config_urls',

-                   new={"pungi_cfg": "http://localhost/pungi.conf#%s"})

+     @patch.object(

+         odcs.server.config.Config,

+         "raw_config_urls",

+         new={"pungi_cfg": "http://localhost/pungi.conf#%s"},

+     )

      def test_submit_build_raw_config(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              flask.g.oidc_scopes = [

-                 '{0}{1}'.format(conf.oidc_base_namespace, 'new-compose')

+                 "{0}{1}".format(conf.oidc_base_namespace, "new-compose")

              ]

  

-             self.client.post('/api/1/composes/', data=json.dumps(

-                 {'source': {'type': 'raw_config',

-                             'source': 'pungi_cfg#hash'},

-                  'label': 'Beta-1.2',

-                  'compose_type': 'nightly'}))

+             self.client.post(

+                 "/api/1/composes/",

+                 data=json.dumps(

+                     {

+                         "source": {"type": "raw_config", "source": "pungi_cfg#hash"},

+                         "label": "Beta-1.2",

+                         "compose_type": "nightly",

+                     }

+                 ),

+             )

          db.session.expire_all()

          c = db.session.query(Compose).filter(Compose.id == 1).one()

          self.assertEqual(c.state, COMPOSE_STATES["wait"])

          self.assertEqual(c.source_type, PungiSourceType.RAW_CONFIG)

-         self.assertEqual(c.source, 'pungi_cfg#hash')

-         self.assertEqual(c.label, 'Beta-1.2')

-         self.assertEqual(c.compose_type, 'nightly')

+         self.assertEqual(c.source, "pungi_cfg#hash")

+         self.assertEqual(c.label, "Beta-1.2")

+         self.assertEqual(c.compose_type, "nightly")

  

  

  class TestViewsCancelCompose(ViewBaseTest):
@@ -1650,8 +2053,13 @@ 

          )

          with freeze_time(self.initial_datetime):

              self.c1 = Compose.create(

-                 db.session, "dev2", PungiSourceType.MODULE, "testmodule:master",

-                 COMPOSE_RESULTS["repository"], 60)

+                 db.session,

+                 "dev2",

+                 PungiSourceType.MODULE,

+                 "testmodule:master",

+                 COMPOSE_RESULTS["repository"],

+                 60,

+             )

              db.session.commit()

              self.task_id = "71267f28-5194-4720-b57b-a665fabdb012"

              self.c1.celery_task_id = self.task_id
@@ -1660,7 +2068,7 @@ 

  

      @patch("odcs.server.views.CELERY_AVAILABLE", new=False)

      def test_no_celery(self):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              resp = self.client.delete("/api/1/composes/%s" % self.c1.id)

          # Without celery we can't cancel, so the code should try to delete the

          # compose and fail on user not being an admin.
@@ -1670,7 +2078,7 @@ 

          self.assertEqual(data["message"], "User dev2 is not in role admins.")

  

      def test_bad_owner(self):

-         with self.test_request_context(user='dev1'):

+         with self.test_request_context(user="dev1"):

              resp = self.client.delete("/api/1/composes/%s" % self.c1.id)

          self.assertEqual(resp.status_code, 403)

          data = json.loads(resp.get_data(as_text=True))
@@ -1682,7 +2090,7 @@ 

  

      @patch("odcs.server.views.celery_app")

      def test_cancel(self, app):

-         with self.test_request_context(user='dev2'):

+         with self.test_request_context(user="dev2"):

              resp = self.client.delete("/api/1/composes/%s" % self.c1_id)

          self.assertEqual(resp.status_code, 202)

          self.assertEqual(app.mock_calls, [call.control.revoke(self.task_id)])

file modified
+21 -24
@@ -38,7 +38,6 @@ 

  

  

  class ConfigPatcher(object):

- 

      def __init__(self, config_obj):

          self.objects = []

          self.config_obj = config_obj
@@ -70,42 +69,42 @@ 

  

      def setUp(self):

          # Not all tests need the after_flush and after_commit event handlers.

-         if event.contains(SignallingSession, 'after_flush',

-                           cache_composes_if_state_changed):

-             event.remove(SignallingSession, 'after_flush',

-                          cache_composes_if_state_changed)

-         if event.contains(SignallingSession, 'after_commit',

-                           start_to_publish_messages):

-             event.remove(SignallingSession, 'after_commit',

-                          start_to_publish_messages)

+         if event.contains(

+             SignallingSession, "after_flush", cache_composes_if_state_changed

+         ):

+             event.remove(

+                 SignallingSession, "after_flush", cache_composes_if_state_changed

+             )

+         if event.contains(SignallingSession, "after_commit", start_to_publish_messages):

+             event.remove(SignallingSession, "after_commit", start_to_publish_messages)

  

          db.session.remove()

          db.drop_all()

          db.create_all()

          db.session.commit()

  

-         setup_composes = getattr(self, 'setup_composes', None)

+         setup_composes = getattr(self, "setup_composes", None)

          if setup_composes is not None:

              assert callable(setup_composes)

              setup_composes()

  

-         if hasattr(self, 'setup_composes'):

-             getattr(self, 'setup_composes')()

+         if hasattr(self, "setup_composes"):

+             getattr(self, "setup_composes")()

  

          # And for tests which need such event handlers, or which test those

          # handlers themselves, add them back.

          if not self.disable_event_handlers:

-             event.listen(SignallingSession, 'after_flush',

-                          cache_composes_if_state_changed)

-             event.listen(SignallingSession, 'after_commit',

-                          start_to_publish_messages)

+             event.listen(

+                 SignallingSession, "after_flush", cache_composes_if_state_changed

+             )

+             event.listen(SignallingSession, "after_commit", start_to_publish_messages)

  

      def tearDown(self):

          if not self.disable_event_handlers:

-             event.remove(SignallingSession, 'after_flush',

-                          cache_composes_if_state_changed)

-             event.remove(SignallingSession, 'after_commit',

-                          start_to_publish_messages)

+             event.remove(

+                 SignallingSession, "after_flush", cache_composes_if_state_changed

+             )

+             event.remove(SignallingSession, "after_commit", start_to_publish_messages)

  

          db.session.remove()

          db.drop_all()
@@ -113,7 +112,5 @@ 

  

          # Nothing special here. Just do what should be done in tearDown

          # to restore the environment for each test method.

-         event.listen(SignallingSession, 'after_flush',

-                      cache_composes_if_state_changed)

-         event.listen(SignallingSession, 'after_commit',

-                      start_to_publish_messages)

+         event.listen(SignallingSession, "after_flush", cache_composes_if_state_changed)

+         event.listen(SignallingSession, "after_commit", start_to_publish_messages)
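
The setUp/tearDown pair above keeps two module-level SQLAlchemy session hooks from leaking between tests: each handler is detached if currently registered and attached again only when the test class wants it. The same guard extracted into a helper (name illustrative):

    from sqlalchemy import event

    def set_handler(target, name, handler, enabled):
        # Detach first so repeated calls stay idempotent.
        if event.contains(target, name, handler):
            event.remove(target, name, handler)
        if enabled:
            event.listen(target, name, handler)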

file modified
+60 -49
@@ -4,13 +4,13 @@ 
  
  
  def running_under_virtualenv():
-     if hasattr(sys, 'real_prefix'):
+     if hasattr(sys, "real_prefix"):
          return True
      elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
          return True
-     if os.getenv('VIRTUAL_ENV', False):
+     if os.getenv("VIRTUAL_ENV", False):
          return True
-     if '--user' in sys.argv:
+     if "--user" in sys.argv:
          return True
      return False
  
@@ -34,7 +34,7 @@ 
      else:
          if system_path is None:
              system_path = []
-         return os.path.join(*(['/'] + system_path))
+         return os.path.join(*(["/"] + system_path))
  
  
  extras_require = {}
@@ -42,51 +42,62 @@ 
      with open(os.path.join(package, "requirements.txt")) as f:
          extras_require[package] = f.readlines()
  
- extras_require['all'] = list(set(
-     requirement
-     for requirements in extras_require.values()
-     for requirement in requirements
- ))
+ extras_require["all"] = list(
+     set(
+         requirement
+         for requirements in extras_require.values()
+         for requirement in requirements
+     )
+ )
  
- with open('test-requirements.txt') as f:
+ with open("test-requirements.txt") as f:
      test_requirements = f.readlines()
  
- setup(name='odcs',
-       description='On Demand Compose Service',
-       version='0.2.48',
-       classifiers=[
-           "Programming Language :: Python",
-           "Topic :: Software Development :: Build Tools"
-       ],
-       keywords='on demand compose service modularity fedora',
-       author='The Factory 2.0 Team',
-       # TODO: Not sure which name would be used for mail alias,
-       # but let's set this proactively to the new name.
-       author_email='odcs-owner@fedoraproject.org',
-       url='https://pagure.io/odcs/',
-       license='GPLv2+',
-       packages=["odcs", "odcs.client", "odcs.server", "odcs.common"],
-       package_dir={
-           "odcs": "common/odcs",
-           "odcs.client": "client/odcs/client",
-           "odcs.server": "server/odcs/server",
-           "odcs.common": "common/odcs/common",
-       },
-       extras_require=extras_require,
-       include_package_data=True,
-       zip_safe=False,
-       install_requires=extras_require["client"],
-       tests_require=test_requirements,
-       scripts=['client/contrib/odcs', 'server/contrib/odcs-promote-compose'],
-       entry_points={
-           'console_scripts': ['odcs-upgradedb = odcs.server.manage:upgradedb [server]',
-                               'odcs-gencert = odcs.server.manage:generatelocalhostcert [server]',
-                               'odcs-frontend = odcs.server.manage:runssl [server]',
-                               'odcs-mock-runroot = odcs.server.mock_runroot:mock_runroot_main [server]',
-                               'odcs-manager = odcs.server.manage:manager_wrapper [server]'],
-       },
-       data_files=[
-           (get_dir(['etc', 'odcs']), ['server/conf/config.py', 'server/conf/pungi.conf',
-                                       'server/conf/raw_config_wrapper.conf']),
-       ],
-       )
+ setup(
+     name="odcs",
+     description="On Demand Compose Service",
+     version="0.2.48",
+     classifiers=[
+         "Programming Language :: Python",
+         "Topic :: Software Development :: Build Tools",
+     ],
+     keywords="on demand compose service modularity fedora",
+     author="The Factory 2.0 Team",
+     # TODO: Not sure which name would be used for mail alias,
+     # but let's set this proactively to the new name.
+     author_email="odcs-owner@fedoraproject.org",
+     url="https://pagure.io/odcs/",
+     license="GPLv2+",
+     packages=["odcs", "odcs.client", "odcs.server", "odcs.common"],
+     package_dir={
+         "odcs": "common/odcs",
+         "odcs.client": "client/odcs/client",
+         "odcs.server": "server/odcs/server",
+         "odcs.common": "common/odcs/common",
+     },
+     extras_require=extras_require,
+     include_package_data=True,
+     zip_safe=False,
+     install_requires=extras_require["client"],
+     tests_require=test_requirements,
+     scripts=["client/contrib/odcs", "server/contrib/odcs-promote-compose"],
+     entry_points={
+         "console_scripts": [
+             "odcs-upgradedb = odcs.server.manage:upgradedb [server]",
+             "odcs-gencert = odcs.server.manage:generatelocalhostcert [server]",
+             "odcs-frontend = odcs.server.manage:runssl [server]",
+             "odcs-mock-runroot = odcs.server.mock_runroot:mock_runroot_main [server]",
+             "odcs-manager = odcs.server.manage:manager_wrapper [server]",
+         ],
+     },
+     data_files=[
+         (
+             get_dir(["etc", "odcs"]),
+             [
+                 "server/conf/config.py",
+                 "server/conf/pungi.conf",
+                 "server/conf/raw_config_wrapper.conf",
+             ],
+         ),
+     ],
+ )
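For readers unfamiliar with the console_scripts syntax above: an entry such as "odcs-upgradedb = odcs.server.manage:upgradedb [server]" instructs pip to generate an executable named odcs-upgradedb that imports odcs.server.manage, calls upgradedb(), and declares a dependency on the "server" extra. A small sketch of inspecting those entries from installed metadata (assumes the package is installed; uses the Python 3.10+ importlib.metadata selection API):

    from importlib.metadata import entry_points

    # List the generated odcs-* console scripts and their targets.
    for ep in entry_points(group="console_scripts"):
        if ep.name.startswith("odcs-"):
            print(ep.name, "->", ep.value)  # e.g. odcs-frontend -> odcs.server.manage:runssl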

file modified
+32 -2
@@ -4,7 +4,7 @@ 
  # and then run "tox" from this directory.
  
  [tox]
- envlist = py27, py3, flake8, bandit
+ envlist = py27, py3, flake8, bandit, black
  
  [testenv]
  install_command = pip install --force-reinstall --ignore-installed {packages}
@@ -24,7 +24,7 @@ 
  basepython = python3
  skip_install = true
  deps = flake8
- commands = flake8 --ignore E501,E731,W504 --exclude server/odcs/server/migrations/*,.tox/*,build/*,.env client common server
+ commands = flake8 client common server
  
  [testenv:bandit]
  basepython = python3
@@ -34,6 +34,36 @@ 
      /bin/bash -c "bandit -r -ll $(find client common server -mindepth 1 -maxdepth 1 ! -name tests ! -name \.\* -type d -o -name \*.py)"
  ignore_outcome = True
  
+ [testenv:black]
+ basepython = python3
+ skip_install = true
+ deps = black
+ commands = black --check --diff client common server setup.py client/contrib/odcs server/contrib/odcs.wsgi server/contrib/odcs_test_deployment server/contrib/odcs-promote-compose
+ 
+ [flake8]
+ exclude =
+     .tox/*,
+     build/*,
+     doc/*,
+     server/odcs/server/migrations/*
+ 
+ filename =
+     *.py,
+     client/contrib/odcs,
+     server/contrib/odcs-promote-compose,
+     server/contrib/odcs.wsgi,
+     server/contrib/odcs_test_deployment
+ 
+ ignore =
+     # E203: whitespace before ':'
+     E203,
+     # E501: line too long
+     E501,
+     # E731: do not assign a lambda expression, use a def
+     E731,
+     # W503: line break occurred before a binary operator
+     W503
+ 
  [pytest]
  addopts = --cov=odcs
  

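The E203 and W503 ignores above are the two pycodestyle checks that are known to disagree with black's output, so they must be disabled when the two tools run side by side. A small illustration (variable names are made up):

    # black formats a slice with a non-trivial lower bound with a space
    # before ":", which E203 ("whitespace before ':'") would flag:
    items = [1, 2, 3, 4]
    upper_half = items[len(items) // 2 :]

    # black also breaks long expressions before binary operators, which is
    # exactly the layout W503 complains about:
    total = (
        len(items)
        + len(upper_half)
    )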
Fixes: https://pagure.io/odcs/issue/402
JIRA: RHELCMP-1082
Signed-off-by: Haibo Lin <hlin@redhat.com>

1 new commit added

  • Fix flake8 issue
3 years ago

I'm not sure it's necessary to keep the os.stat(abspath) call in the "Fix flake8 issue" commit: https://pagure.io/fork/hlin/odcs/c/8b132ed372fc80eb49d0d1b897041bd57968710d

rebased onto 592b43539aa8d4656d24f53efe468fc04edb2a74

3 years ago

rebased onto 7b3628133b9ae1f57b2b88489e58aefe47fd9222

3 years ago

rebased onto 5b5294687a78f267c464ace6554a3c834bd23e72

3 years ago

rebased onto ff27319af70666527662314915021f8bf42f0345

3 years ago

Jenkins will need to be updated to verify the formatting. I'll do that after this is merged.

I think the stat call is needed to verify that the symlink points to an existing file. There are other ways to verify this, but this one is very simple.
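For context on the stat discussion: os.stat() follows symlinks, so it raises FileNotFoundError for a dangling link even though the link itself exists, which makes a bare stat call a one-line existence check for the link target. A quick POSIX sketch with illustrative paths:

    import os
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        link = os.path.join(tmp, "dangling")
        os.symlink(os.path.join(tmp, "missing-target"), link)
        print(os.path.lexists(link))   # True: the symlink itself exists
        try:
            os.stat(link)              # follows the link to the missing target
        except FileNotFoundError:
            print("symlink target does not exist")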

Looks good to me in general.

rebased onto ab59155

3 years ago

Pull-Request has been merged by lsedlar

3 years ago
Changes Summary (76 files changed)

+6 -0        README.md
+200 -150    client/contrib/odcs
+1 -1        client/odcs/__init__.py
+166 -91     client/odcs/client/odcs.py
+259 -259    client/tests/test_client_odcs.py
+1 -1        common/odcs/__init__.py
+1 -2        common/odcs/common/types.py
+49 -45      server/conf/config.py
+34 -14      server/contrib/odcs-promote-compose
+4 -3        server/contrib/odcs.wsgi
+145 -67     server/contrib/odcs_test_deployment
+1 -1        server/odcs/__init__.py
+13 -15      server/odcs/server/__init__.py
+93 -45      server/odcs/server/api_utils.py
+77 -53      server/odcs/server/auth.py
+243 -147    server/odcs/server/backend.py
+7 -5        server/odcs/server/cache.py
+24 -14      server/odcs/server/celery_tasks.py
+3 -1        server/odcs/server/comps.py
+358 -324    server/odcs/server/config.py
+12 -11      server/odcs/server/events.py
+4 -3        server/odcs/server/logger.py
+35 -36      server/odcs/server/manage.py
+30 -18      server/odcs/server/mbs.py
+16 -10      server/odcs/server/mergerepo.py
+10 -10      server/odcs/server/messaging.py
+11 -6       server/odcs/server/metrics.py
+20 -13      server/odcs/server/migrations/env.py
+4 -4        server/odcs/server/migrations/versions/0571a5ca58a0_add_index_to_compose_state.py
+9 -8        server/odcs/server/migrations/versions/0d4d8e1cfe29_create_user_model.py
+6 -4        server/odcs/server/migrations/versions/11b350234051_.py
+6 -4        server/odcs/server/migrations/versions/3b92820da295_add_index_to_compose_time_to_expire.py
+4 -4        server/odcs/server/migrations/versions/4514febd31fa_add_builds.py
+19 -18      server/odcs/server/migrations/versions/566733ac3811_.py
+4 -4        server/odcs/server/migrations/versions/812f2745248f_.py
+6 -4        server/odcs/server/migrations/versions/82172e6a3154_.py
+4 -4        server/odcs/server/migrations/versions/a855c39e2a0f_store_celery_task_id.py
+4 -4        server/odcs/server/migrations/versions/a8e259e0208c_add_compose_state_reason.py
+4 -4        server/odcs/server/migrations/versions/b2725d046624_.py
+4 -4        server/odcs/server/migrations/versions/b75ad2afc207_.py
+4 -4        server/odcs/server/migrations/versions/c370b90de998_add_index_on_compose_source_type_and_.py
+8 -8        server/odcs/server/migrations/versions/cd0781bbdab1_.py
+10 -8       server/odcs/server/migrations/versions/d1da07e15c54_.py
+7 -5        server/odcs/server/migrations/versions/de0a86d7de49_.py
+10 -6       server/odcs/server/migrations/versions/e186faabdafe_.py
+8 -6        server/odcs/server/migrations/versions/e2163db7b15d_.py
+6 -4        server/odcs/server/migrations/versions/f24a36cc8a16_add_index_on_compose_reused_id.py
+4 -4        server/odcs/server/migrations/versions/f4bc999818d7_add_compose_removed_by.py
+5 -3        server/odcs/server/mock_runroot.py
+102 -71     server/odcs/server/models.py
+11 -10      server/odcs/server/proxy.py
+27 -24      server/odcs/server/pulp.py
+92 -49      server/odcs/server/pungi.py
+8 -7        server/odcs/server/pungi_compose.py
+28 -10      server/odcs/server/utils.py
+150 -131    server/odcs/server/views.py
+42 -58      server/tests/mbs.py
+144 -133    server/tests/test_auth.py
+610 -276    server/tests/test_backend.py
+2 -1        server/tests/test_backend_thread.py
+86 -31      server/tests/test_cache.py
+74 -74      server/tests/test_celery_tasks.py
+149 -56     server/tests/test_composerthread.py
+58 -44      server/tests/test_events.py
+30 -11      server/tests/test_metrics.py
+60 -20      server/tests/test_mock_runroot.py
+127 -69     server/tests/test_models.py
+72 -50      server/tests/test_pulp.py
+311 -203    server/tests/test_pungi.py
+31 -27      server/tests/test_pungi_compose.py
+56 -36      server/tests/test_remove_expired_composes_thread.py
+1 -3        server/tests/test_utils.py
+1055 -647   server/tests/test_views.py
+21 -24      server/tests/utils.py
+60 -49      setup.py
+32 -2       tox.ini