#1236 Format the coding style across the codebase using "black" and manual tweaks
Merged 5 months ago by jkaluza. Opened 5 months ago by mprahl.
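
Most of the churn in the diffs below is black's string normalization (single quotes become double quotes) plus its line re-wrapping; the rest, such as the backslash continuation used for AMQ_DEST_ADDRESS, are the "manual tweaks" from the title, since black does not introduce backslash continuations on its own. The exact command is not recorded in the PR, but the re-wrapped lines fit within roughly 100 columns rather than black's default 88, so the invocation was presumably along these lines (target paths assumed from the diffs, not stated anywhere):

    # Assumed invocation; black's default line length is 88, so a larger
    # --line-length must have been passed to allow the ~100-column lines below.
    black --line-length 100 conf/ fedmsg.d/ module_build_service/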

file modified
+52 -56

@@ -4,37 +4,35 @@ 

  # declared properly somewhere/somehow

  confdir = path.abspath(path.dirname(__file__))

  # use parent dir as dbdir else fallback to current dir

- dbdir = path.abspath(path.join(confdir, '..')) if confdir.endswith('conf') \

-     else confdir

+ dbdir = path.abspath(path.join(confdir, "..")) if confdir.endswith("conf") else confdir

  

  

  class BaseConfiguration(object):

      DEBUG = False

      # Make this random (used to generate session keys)

-     SECRET_KEY = '74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0'

-     SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(path.join(

-         dbdir, 'module_build_service.db'))

+     SECRET_KEY = "74d9e9f9cd40e66fc6c4c2e9987dce48df3ce98542529fd0"

+     SQLALCHEMY_DATABASE_URI = "sqlite:///{0}".format(path.join(dbdir, "module_build_service.db"))

      SQLALCHEMY_TRACK_MODIFICATIONS = True

      # Where we should run when running "manage.py run" directly.

-     HOST = '0.0.0.0'

+     HOST = "0.0.0.0"

      PORT = 5000

  

      # Global network-related values, in seconds

      NET_TIMEOUT = 120

      NET_RETRY_INTERVAL = 30

  

-     SYSTEM = 'koji'

-     MESSAGING = 'fedmsg'  # or amq

-     MESSAGING_TOPIC_PREFIX = ['org.fedoraproject.prod']

-     KOJI_CONFIG = '/etc/module-build-service/koji.conf'

-     KOJI_PROFILE = 'koji'

-     ARCHES = ['i686', 'armv7hl', 'x86_64']

+     SYSTEM = "koji"

+     MESSAGING = "fedmsg"  # or amq

+     MESSAGING_TOPIC_PREFIX = ["org.fedoraproject.prod"]

+     KOJI_CONFIG = "/etc/module-build-service/koji.conf"

+     KOJI_PROFILE = "koji"

+     ARCHES = ["i686", "armv7hl", "x86_64"]

      ALLOW_ARCH_OVERRIDE = False

-     KOJI_REPOSITORY_URL = 'https://kojipkgs.fedoraproject.org/repos'

-     KOJI_TAG_PREFIXES = ['module', 'scrmod']

+     KOJI_REPOSITORY_URL = "https://kojipkgs.fedoraproject.org/repos"

+     KOJI_TAG_PREFIXES = ["module", "scrmod"]

      KOJI_ENABLE_CONTENT_GENERATOR = True

      CHECK_FOR_EOL = False

-     PDC_URL = 'https://pdc.fedoraproject.org/rest_api/v1'

+     PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"

      PDC_INSECURE = False

      PDC_DEVELOP = True

      SCMURLS = ["https://src.fedoraproject.org/modules/"]

@@ -50,30 +48,27 @@ 

  

      ALLOW_CUSTOM_SCMURLS = False

  

-     RPMS_DEFAULT_REPOSITORY = 'https://src.fedoraproject.org/rpms/'

+     RPMS_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/rpms/"

      RPMS_ALLOW_REPOSITORY = False

-     RPMS_DEFAULT_CACHE = 'http://pkgs.fedoraproject.org/repo/pkgs/'

+     RPMS_DEFAULT_CACHE = "http://pkgs.fedoraproject.org/repo/pkgs/"

      RPMS_ALLOW_CACHE = False

  

-     MODULES_DEFAULT_REPOSITORY = 'https://src.fedoraproject.org/modules/'

+     MODULES_DEFAULT_REPOSITORY = "https://src.fedoraproject.org/modules/"

      MODULES_ALLOW_REPOSITORY = False

      MODULES_ALLOW_SCRATCH = False

  

-     ALLOWED_GROUPS = set([

-         'packager',

-         # 'modularity-wg',

-     ])

+     ALLOWED_GROUPS = set(["packager"])

  

      ALLOWED_GROUPS_TO_IMPORT_MODULE = set()

  

      # Available backends are: console and file

-     LOG_BACKEND = 'console'

+     LOG_BACKEND = "console"

  

      # Path to log file when LOG_BACKEND is set to "file".

-     LOG_FILE = 'module_build_service.log'

+     LOG_FILE = "module_build_service.log"

  

      # Available log levels are: debug, info, warn, error.

-     LOG_LEVEL = 'info'

+     LOG_LEVEL = "info"

  

      # Settings for Kerberos

      KRB_KEYTAB = None

@@ -81,31 +76,32 @@ 

  

      # AMQ prefixed variables are required only while using 'amq' as messaging backend

      # Addresses to listen to

-     AMQ_RECV_ADDRESSES = ['amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.koji',

-                           ('amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.'

-                            'module_build_service')]

+     AMQ_RECV_ADDRESSES = [

+         "amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.koji",

+         "amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.module_build_service",

+     ]

      # Address for sending messages

-     AMQ_DEST_ADDRESS = ('amqps://messaging.mydomain.com/Consumer.m8y.'

-                         'VirtualTopic.eng.module_build_service')

-     AMQ_CERT_FILE = '/etc/module_build_service/msg-m8y-client.crt'

-     AMQ_PRIVATE_KEY_FILE = '/etc/module_build_service/msg-m8y-client.key'

-     AMQ_TRUSTED_CERT_FILE = '/etc/module_build_service/Root-CA.crt'

+     AMQ_DEST_ADDRESS = \

+         "amqps://messaging.mydomain.com/Consumer.m8y.VirtualTopic.eng.module_build_service"

+     AMQ_CERT_FILE = "/etc/module_build_service/msg-m8y-client.crt"

+     AMQ_PRIVATE_KEY_FILE = "/etc/module_build_service/msg-m8y-client.key"

+     AMQ_TRUSTED_CERT_FILE = "/etc/module_build_service/Root-CA.crt"

  

      # Disable Client Authorization

      NO_AUTH = False

  

-     CACHE_DIR = '~/modulebuild/cache'

+     CACHE_DIR = "~/modulebuild/cache"

  

  

  class TestConfiguration(BaseConfiguration):

-     BUILD_LOGS_DIR = '/tmp'

-     BUILD_LOGS_NAME_FORMAT = 'build-{id}.log'

-     LOG_BACKEND = 'console'

-     LOG_LEVEL = 'debug'

-     SQLALCHEMY_DATABASE_URI = 'sqlite://'

+     BUILD_LOGS_DIR = "/tmp"

+     BUILD_LOGS_NAME_FORMAT = "build-{id}.log"

+     LOG_BACKEND = "console"

+     LOG_LEVEL = "debug"

+     SQLALCHEMY_DATABASE_URI = "sqlite://"

      DEBUG = True

-     MESSAGING = 'in_memory'

-     PDC_URL = 'https://pdc.fedoraproject.org/rest_api/v1'

+     MESSAGING = "in_memory"

+     PDC_URL = "https://pdc.fedoraproject.org/rest_api/v1"

  

      # Global network-related values, in seconds

      NET_TIMEOUT = 3

@@ -114,19 +110,19 @@ 

      SCM_NET_TIMEOUT = 0.1

      SCM_NET_RETRY_INTERVAL = 0.1

  

-     KOJI_CONFIG = './conf/koji.conf'

-     KOJI_PROFILE = 'staging'

-     SERVER_NAME = 'localhost'

+     KOJI_CONFIG = "./conf/koji.conf"

+     KOJI_PROFILE = "staging"

+     SERVER_NAME = "localhost"

  

-     KOJI_REPOSITORY_URL = 'https://kojipkgs.stg.fedoraproject.org/repos'

+     KOJI_REPOSITORY_URL = "https://kojipkgs.stg.fedoraproject.org/repos"

      SCMURLS = ["https://src.stg.fedoraproject.org/modules/"]

-     AUTH_METHOD = 'oidc'

-     RESOLVER = 'db'

+     AUTH_METHOD = "oidc"

+     RESOLVER = "db"

  

-     ALLOWED_GROUPS_TO_IMPORT_MODULE = set(['mbs-import-module'])

-     GREENWAVE_DECISION_CONTEXT = 'osci_compose_gate_modules'

+     ALLOWED_GROUPS_TO_IMPORT_MODULE = set(["mbs-import-module"])

+     GREENWAVE_DECISION_CONTEXT = "osci_compose_gate_modules"

  

-     STREAM_SUFFIXES = {r'^el\d+\.\d+\.\d+\.z$': 0.1}

+     STREAM_SUFFIXES = {r"^el\d+\.\d+\.\d+\.z$": 0.1}

  

  

  class ProdConfiguration(BaseConfiguration):

@@ -134,22 +130,22 @@ 

  

  

  class LocalBuildConfiguration(BaseConfiguration):

-     LOG_LEVEL = 'debug'

-     MESSAGING = 'in_memory'

+     LOG_LEVEL = "debug"

+     MESSAGING = "in_memory"

  

      ARCH_AUTODETECT = True

-     ARCH_FALLBACK = 'x86_64'

+     ARCH_FALLBACK = "x86_64"

  

      ALLOW_CUSTOM_SCMURLS = True

-     RESOLVER = 'mbs'

+     RESOLVER = "mbs"

      RPMS_ALLOW_REPOSITORY = True

      MODULES_ALLOW_REPOSITORY = True

  

  

  class OfflineLocalBuildConfiguration(LocalBuildConfiguration):

-     RESOLVER = 'local'

+     RESOLVER = "local"

  

  

  class DevConfiguration(LocalBuildConfiguration):

      DEBUG = True

-     LOG_BACKEND = 'console'

+     LOG_BACKEND = "console"

file modified
+4 -12

@@ -2,16 +2,8 @@ 

      logging=dict(

          loggers=dict(

              # Quiet this guy down...

-             requests={

-                 "level": "WARNING",

-                 "propagate": True,

-                 "handlers": ["console"],

-             },

-             module_build_service={

-                 "level": "INFO",

-                 "propagate": True,

-                 "handlers": ["console"],

-             },

-         ),

-     ),

+             requests={"level": "WARNING", "propagate": True, "handlers": ["console"]},

+             module_build_service={"level": "INFO", "propagate": True, "handlers": ["console"]},

+         )

+     )

  )

file modified
+1 -4

@@ -1,4 +1,1 @@ 

- config = {

-     'mbsconsumer': True,

-     'mbspoller': True,

- }

+ config = {"mbsconsumer": True, "mbspoller": True}

file modified

@@ -3,10 +3,8 @@ 

  config = {

      # Just for dev.

      "validate_signatures": False,

- 

      # Talk to the relay, so things also make it to composer.stg in our dev env

      "active": True,

- 

      # Since we're in active mode, we don't need to declare any of our own

      # passive endpoints.  This placeholder value needs to be here for the tests

      # to pass in Jenkins, though.  \o/

@@ -14,10 +12,9 @@ 

          "fedora-infrastructure": [

              # Just listen to staging for now, not to production (spam!)

              # "tcp://hub.fedoraproject.org:9940",

-             "tcp://stg.fedoraproject.org:9940",

-         ],

+             "tcp://stg.fedoraproject.org:9940"

+         ]

      },

- 

      # Start of code signing configuration

      # 'sign_messages': True,

      # 'validate_signatures': True,

@@ -37,12 +34,11 @@ 

  }

  

  # developer's instance

- if 'MODULE_BUILD_SERVICE_DEVELOPER_ENV' in os.environ and \

-    os.environ['MODULE_BUILD_SERVICE_DEVELOPER_ENV'].lower() in (

-        '1', 'on', 'true', 'y', 'yes'):

-     config['endpoints']['relay_outbound'] = ["tcp://fedmsg-relay:2001"]

-     config['relay_inbound'] = ["tcp://fedmsg-relay:2003"]

+ true_options = ("1", "on", "true", "y", "yes")

+ if os.environ.get("MODULE_BUILD_SERVICE_DEVELOPER_ENV", "").lower() in true_options:

+     config["endpoints"]["relay_outbound"] = ["tcp://fedmsg-relay:2001"]

+     config["relay_inbound"] = ["tcp://fedmsg-relay:2003"]

  else:

      # These configuration values are reasonable for most other configurations.

-     config['endpoints']['relay_outbound'] = ["tcp://127.0.0.1:4001"]

-     config['relay_inbound'] = ["tcp://127.0.0.1:2003"]

+     config["endpoints"]["relay_outbound"] = ["tcp://127.0.0.1:4001"]

+     config["relay_inbound"] = ["tcp://127.0.0.1:2003"]
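
Beyond quote normalization, this file carries one of the manual tweaks: the developer-environment check is collapsed from a two-step "in os.environ" test into a single os.environ.get() lookup against a named true_options tuple. A standalone sanity check (not part of the PR) that the two forms agree, including when the variable is unset:

    import os

    true_options = ("1", "on", "true", "y", "yes")
    var = "MODULE_BUILD_SERVICE_DEVELOPER_ENV"

    # Old form: membership test first, then a direct lookup.
    old = var in os.environ and os.environ[var].lower() in true_options
    # New form: .get() with a default of "", which can never match true_options.
    new = os.environ.get(var, "").lower() in true_options

    assert old == new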

file modified

@@ -46,11 +46,10 @@ 

  from sqlalchemy.pool import StaticPool

  from logging import getLogger

  import gi  # noqa

- gi.require_version('Modulemd', '1.0')  # noqa

+ gi.require_version("Modulemd", "1.0")  # noqa

  from gi.repository import Modulemd  # noqa

  

- from module_build_service.logger import (

-     init_logging, ModuleBuildLogs, level_flags, MBSLogger)

+ from module_build_service.logger import init_logging, ModuleBuildLogs, level_flags, MBSLogger

  

  from module_build_service.errors import (

      ValidationError, Unauthorized, UnprocessableEntity, Conflict, NotFound,

@@ -59,9 +58,9 @@ 

  from module_build_service.proxy import ReverseProxy

  

  try:

-     version = pkg_resources.get_distribution('module-build-service').version

+     version = pkg_resources.get_distribution("module-build-service").version

  except pkg_resources.DistributionNotFound:

-     version = 'unknown'

+     version = "unknown"

  api_version = 2

  

  app = Flask(__name__)

@@ -77,12 +76,13 @@ 

  

      This is used *only* during tests to make them faster.

      """

+ 

      def apply_driver_hacks(self, app, info, options):

-         if info.drivername == 'sqlite' and info.database in (None, '', ':memory:'):

-             options['poolclass'] = StaticPool

-             options['connect_args'] = {'check_same_thread': False}

+         if info.drivername == "sqlite" and info.database in (None, "", ":memory:"):

+             options["poolclass"] = StaticPool

+             options["connect_args"] = {"check_same_thread": False}

              try:

-                 del options['pool_size']

+                 del options["pool_size"]

              except KeyError:

                  pass

  

@@ -107,59 +107,56 @@ 

  

  def load_views():

      from module_build_service import views

+ 

      assert views

  

  

  @app.errorhandler(ValidationError)

  def validationerror_error(e):

      """Flask error handler for ValidationError exceptions"""

-     return json_error(400, 'Bad Request', str(e))

+     return json_error(400, "Bad Request", str(e))

  

  

  @app.errorhandler(Unauthorized)

  def unauthorized_error(e):

      """Flask error handler for NotAuthorized exceptions"""

-     return json_error(401, 'Unauthorized', str(e))

+     return json_error(401, "Unauthorized", str(e))

  

  

  @app.errorhandler(Forbidden)

  def forbidden_error(e):

      """Flask error handler for Forbidden exceptions"""

-     return json_error(403, 'Forbidden', str(e))

+     return json_error(403, "Forbidden", str(e))

  

  

  @app.errorhandler(RuntimeError)

  def runtimeerror_error(e):

      """Flask error handler for RuntimeError exceptions"""

      log.exception("RuntimeError exception raised")

-     return json_error(500, 'Internal Server Error', str(e))

+     return json_error(500, "Internal Server Error", str(e))

  

  

  @app.errorhandler(UnprocessableEntity)

  def unprocessableentity_error(e):

      """Flask error handler for UnprocessableEntity exceptions"""

-     return json_error(422, 'Unprocessable Entity', str(e))

+     return json_error(422, "Unprocessable Entity", str(e))

  

  

  @app.errorhandler(Conflict)

  def conflict_error(e):

      """Flask error handler for Conflict exceptions"""

-     return json_error(409, 'Conflict', str(e))

+     return json_error(409, "Conflict", str(e))

  

  

  @app.errorhandler(NotFound)

  def notfound_error(e):

      """Flask error handler for Conflict exceptions"""

-     return json_error(404, 'Not Found', str(e))

+     return json_error(404, "Not Found", str(e))

  

  

  init_logging(conf)

  log = MBSLogger()

- build_logs = ModuleBuildLogs(

-     conf.build_logs_dir,

-     conf.build_logs_name_format,

-     conf.log_level,

- )

+ build_logs = ModuleBuildLogs(conf.build_logs_dir, conf.build_logs_name_format, conf.log_level)

  

  

  def get_url_for(*args, **kwargs):

@@ -171,11 +168,13 @@ 

  

      # Localhost is the right URL only when the scheduler runs on the same

      # system as the web views.

-     app.config['SERVER_NAME'] = 'localhost'

+     app.config["SERVER_NAME"] = "localhost"

      with app.app_context():

-         log.debug("WARNING: get_url_for() has been called without the Flask "

-                   "app_context. That can lead to SQLAlchemy errors caused by "

-                   "multiple sessions being used at the same time.")

+         log.debug(

+             "WARNING: get_url_for() has been called without the Flask "

+             "app_context. That can lead to SQLAlchemy errors caused by "

+             "multiple sessions being used at the same time."

+         )

          return url_for(*args, **kwargs)

  

  

file modified
+61 -56

@@ -31,6 +31,7 @@ 

  import requests

  import kerberos

  from flask import Response, g

+ 

  # Starting with Flask 0.9, the _app_ctx_stack is the correct one,

  # before that we need to use the _request_ctx_stack.

  try:

@@ -50,12 +51,12 @@ 

  

  

  client_secrets = None

- region = make_region().configure('dogpile.cache.memory')

+ region = make_region().configure("dogpile.cache.memory")

  

  

  def _json_loads(content):

      if not isinstance(content, str):

-         content = content.decode('utf-8')

+         content = content.decode("utf-8")

      return json.loads(content)

  

  

@@ -67,8 +68,7 @@ 

      if "OIDC_CLIENT_SECRETS" not in app.config:

          raise Forbidden("OIDC_CLIENT_SECRETS must be set in server config.")

  

-     secrets = _json_loads(open(app.config['OIDC_CLIENT_SECRETS'],

-                                'r').read())

+     secrets = _json_loads(open(app.config["OIDC_CLIENT_SECRETS"], "r").read())

      client_secrets = list(secrets.values())[0]

  

  

@@ -79,13 +79,15 @@ 

      if not client_secrets:

          return None

  

-     request = {'token': token,

-                'token_type_hint': 'Bearer',

-                'client_id': client_secrets['client_id'],

-                'client_secret': client_secrets['client_secret']}

-     headers = {'Content-type': 'application/x-www-form-urlencoded'}

+     request = {

+         "token": token,

+         "token_type_hint": "Bearer",

+         "client_id": client_secrets["client_id"],

+         "client_secret": client_secrets["client_secret"],

+     }

+     headers = {"Content-type": "application/x-www-form-urlencoded"}

  

-     resp = requests.post(client_secrets['token_introspection_uri'], data=request, headers=headers)

+     resp = requests.post(client_secrets["token_introspection_uri"], data=request, headers=headers)

      return resp.json()

  

  

@@ -96,8 +98,8 @@ 

      if not client_secrets:

          return None

  

-     headers = {'authorization': 'Bearer ' + token}

-     resp = requests.get(client_secrets['userinfo_uri'], headers=headers)

+     headers = {"authorization": "Bearer " + token}

+     resp = requests.get(client_secrets["userinfo_uri"], headers=headers)

      return resp.json()

  

  

@@ -110,8 +112,8 @@ 

      if "authorization" not in request.headers:

          raise Unauthorized("No 'authorization' header found.")

  

-     header = request.headers['authorization'].strip()

-     prefix = 'Bearer '

+     header = request.headers["authorization"].strip()

+     prefix = "Bearer "

      if not header.startswith(prefix):

          raise Unauthorized("Authorization headers must start with %r" % prefix)

  

@@ -129,16 +131,15 @@ 

      if "OIDC_REQUIRED_SCOPE" not in app.config:

          raise Forbidden("OIDC_REQUIRED_SCOPE must be set in server config.")

  

-     presented_scopes = data['scope'].split(' ')

+     presented_scopes = data["scope"].split(" ")

      required_scopes = [

-         'openid',

-         'https://id.fedoraproject.org/scope/groups',

+         "openid",

+         "https://id.fedoraproject.org/scope/groups",

          app.config["OIDC_REQUIRED_SCOPE"],

      ]

      for scope in required_scopes:

          if scope not in presented_scopes:

-             raise Unauthorized("Required OIDC scope %r not present: %r" % (

-                 scope, presented_scopes))

+             raise Unauthorized("Required OIDC scope %r not present: %r" % (scope, presented_scopes))

  

      try:

          extended_data = _get_user_info(token)

@@ -153,7 +154,7 @@ 

          groups = set()

      else:

          try:

-             groups = set(extended_data['groups'])

+             groups = set(extended_data["groups"])

          except Exception as e:

              error = "Could not find groups in UserInfo from OIDC %s" % str(e)

              log.exception(extended_data)

@@ -175,19 +176,20 @@ 

          # If the config specifies a keytab to use, then override the KRB5_KTNAME

          # environment variable

          if conf.kerberos_keytab:

-             os.environ['KRB5_KTNAME'] = conf.kerberos_keytab

+             os.environ["KRB5_KTNAME"] = conf.kerberos_keytab

  

-         if 'KRB5_KTNAME' in os.environ:

+         if "KRB5_KTNAME" in os.environ:

              try:

-                 principal = kerberos.getServerPrincipalDetails('HTTP', hostname)

+                 principal = kerberos.getServerPrincipalDetails("HTTP", hostname)

              except kerberos.KrbError as error:

-                 raise Unauthorized(

-                     'Kerberos: authentication failed with "{0}"'.format(str(error)))

+                 raise Unauthorized('Kerberos: authentication failed with "{0}"'.format(str(error)))

  

              log.debug('Kerberos: server is identifying as "{0}"'.format(principal))

          else:

-             raise Unauthorized('Kerberos: set the config value of "KERBEROS_KEYTAB" or the '

-                                'environment variable "KRB5_KTNAME" to your keytab file')

+             raise Unauthorized(

+                 'Kerberos: set the config value of "KERBEROS_KEYTAB" or the '

+                 'environment variable "KRB5_KTNAME" to your keytab file'

+             )

  

      def _gssapi_authenticate(self, token):

          """

@@ -201,23 +203,23 @@ 

          try:

              rc, state = kerberos.authGSSServerInit(self.service_name)

              if rc != kerberos.AUTH_GSS_COMPLETE:

-                 log.error('Kerberos: unable to initialize server context')

+                 log.error("Kerberos: unable to initialize server context")

                  return None

  

              rc = kerberos.authGSSServerStep(state, token)

              if rc == kerberos.AUTH_GSS_COMPLETE:

-                 log.debug('Kerberos: completed GSSAPI negotiation')

+                 log.debug("Kerberos: completed GSSAPI negotiation")

                  ctx.kerberos_token = kerberos.authGSSServerResponse(state)

                  ctx.kerberos_user = kerberos.authGSSServerUserName(state)

                  return rc

              elif rc == kerberos.AUTH_GSS_CONTINUE:

-                 log.debug('Kerberos: continuing GSSAPI negotiation')

+                 log.debug("Kerberos: continuing GSSAPI negotiation")

                  return kerberos.AUTH_GSS_CONTINUE

              else:

-                 log.debug('Kerberos: unable to step server context')

+                 log.debug("Kerberos: unable to step server context")

                  return None

          except kerberos.GSSError as error:

-             log.error('Kerberos: unable to authenticate: {0}'.format(str(error)))

+             log.error("Kerberos: unable to authenticate: {0}".format(str(error)))

              return None

          finally:

              if state:

@@ -235,25 +237,25 @@ 

              kerberos_user = ctx.kerberos_user

              kerberos_token = ctx.kerberos_token

          elif rc != kerberos.AUTH_GSS_CONTINUE:

-             raise Forbidden('Invalid Kerberos ticket')

+             raise Forbidden("Invalid Kerberos ticket")

  

          return kerberos_user, kerberos_token

  

  

  def get_user_kerberos(request):

      user = None

-     if 'Authorization' not in request.headers:

-         response = Response('Unauthorized', 401, {'WWW-Authenticate': 'Negotiate'})

+     if "Authorization" not in request.headers:

+         response = Response("Unauthorized", 401, {"WWW-Authenticate": "Negotiate"})

          exc = FlaskUnauthorized()

          # For some reason, certain versions of werkzeug raise an exception when passing `response`

          # in the constructor. This is a work-around.

          exc.response = response

          raise exc

-     header = request.headers.get('Authorization')

-     token = ''.join(header.strip().split()[1:])

+     header = request.headers.get("Authorization")

+     token = "".join(header.strip().split()[1:])

      user, kerberos_token = KerberosAuthenticate().process_request(token)

      # Remove the realm

-     user = user.split('@')[0]

+     user = user.split("@")[0]

      # If the user is part of the whitelist, then the group membership check is skipped

      if user in conf.allowed_users:

          groups = []

@@ -275,20 +277,21 @@ 

  class Ldap(object):

      """ A class that handles LDAP connections and queries

      """

+ 

      connection = None

      base_dn = None

  

      def __init__(self):

          if not conf.ldap_uri:

-             raise Forbidden('LDAP_URI must be set in server config.')

+             raise Forbidden("LDAP_URI must be set in server config.")

          if conf.ldap_groups_dn:

              self.base_dn = conf.ldap_groups_dn

          else:

-             raise Forbidden('LDAP_GROUPS_DN must be set in server config.')

+             raise Forbidden("LDAP_GROUPS_DN must be set in server config.")

  

-         if conf.ldap_uri.startswith('ldaps://'):

-             tls = ldap3.Tls(ca_certs_file='/etc/pki/tls/certs/ca-bundle.crt',

-                             validate=ssl.CERT_REQUIRED)

+         if conf.ldap_uri.startswith("ldaps://"):

+             tls = ldap3.Tls(

+                 ca_certs_file="/etc/pki/tls/certs/ca-bundle.crt", validate=ssl.CERT_REQUIRED)

              server = ldap3.Server(conf.ldap_uri, use_ssl=True, tls=tls)

          else:

              server = ldap3.Server(conf.ldap_uri)

@@ -296,26 +299,28 @@ 

          try:

              self.connection.open()

          except ldap3.core.exceptions.LDAPSocketOpenError as error:

-             log.error('The connection to "{0}" failed. The following error was raised: {1}'

-                       .format(conf.ldap_uri, str(error)))

-             raise Forbidden('The connection to the LDAP server failed. Group membership '

-                             'couldn\'t be obtained.')

+             log.error(

+                 'The connection to "{0}" failed. The following error was raised: {1}'.format(

+                     conf.ldap_uri, str(error)))

+             raise Forbidden(

+                 "The connection to the LDAP server failed. Group membership couldn't be obtained.")

  

      def get_user_membership(self, uid):

          """ Gets the group membership of a user

          :param uid: a string of the uid of the user

          :return: a list of common names of the posixGroups the user is a member of

          """

-         ldap_filter = '(memberUid={0})'.format(uid)

+         ldap_filter = "(memberUid={0})".format(uid)

          # Only get the groups in the base container/OU

-         self.connection.search(self.base_dn, ldap_filter, search_scope=ldap3.LEVEL,

-                                attributes=['cn'])

+         self.connection.search(

+             self.base_dn, ldap_filter, search_scope=ldap3.LEVEL, attributes=["cn"])

          groups = self.connection.response

          try:

-             return [group['attributes']['cn'][0] for group in groups]

+             return [group["attributes"]["cn"][0] for group in groups]

          except KeyError:

-             log.exception('The LDAP groups could not be determined based on the search results '

-                           'of "{0}"'.format(str(groups)))

+             log.exception(

+                 "The LDAP groups could not be determined based on the search results "

+                 'of "{0}"'.format(str(groups)))

              return []

  

  

@@ -326,11 +331,11 @@ 

      membership such as ('mprahl', {'factory2', 'devel'})

      """

      if conf.no_auth is True:

-         log.debug('Authorization is disabled.')

-         return 'anonymous', {'packager'}

+         log.debug("Authorization is disabled.")

+         return "anonymous", {"packager"}

  

      if "user" not in g and "groups" not in g:

-         get_user_func_name = 'get_user_{0}'.format(conf.auth_method)

+         get_user_func_name = "get_user_{0}".format(conf.auth_method)

          get_user_func = globals().get(get_user_func_name)

          if not get_user_func:

              raise RuntimeError('The function "{0}" is not implemented'.format(get_user_func_name))

file modified

@@ -33,11 +33,11 @@ 

      # input only since 0.11, but RHEL7 contains 0.10.1.

      # https://github.com/pallets/flask/commit/daceb3e3a028b4b408c4bbdbdef0047f1de3a7c9

      indent = None

-     separators = (',', ':')

+     separators = (",", ":")

  

-     if module_build_service.app.config['JSONIFY_PRETTYPRINT_REGULAR'] and not request.is_xhr:

+     if module_build_service.app.config["JSONIFY_PRETTYPRINT_REGULAR"] and not request.is_xhr:

          indent = 2

-         separators = (', ', ': ')

+         separators = (", ", ": ")

  

      if args and kwargs:

          raise TypeError("jsonify() behavior undefined when passed both args and kwargs")

@@ -51,6 +51,5 @@ 

      # Note that we add '\n' to end of response

      # (see https://github.com/mitsuhiko/flask/pull/1262)

      rv = module_build_service.app.response_class(

-         (dumps(data, indent=indent, separators=separators), '\n'),

-         mimetype='application/json')

+         (dumps(data, indent=indent, separators=separators), "\n"), mimetype="application/json")

      return rv

@@ -52,6 +52,7 @@ 

  

  def get_session(config, login=True):

      from module_build_service.builder.KojiModuleBuilder import KojiModuleBuilder

+ 

      return KojiModuleBuilder.get_session(config, login=login)

  

  

@@ -69,7 +70,7 @@ 

      """

      for suffix in suffixes:

          if s.endswith(suffix):

-             s = s[:-len(suffix)]

+             s = s[: -len(suffix)]

              break

      return s

  

@@ -79,8 +80,9 @@ 

      Wrapper around KojiModuleBuilder.koji_retrying_multicall_map, because

      we cannot import that method normally because of import loop.

      """

-     from module_build_service.builder.KojiModuleBuilder import \

-         koji_retrying_multicall_map as multicall

+     from module_build_service.builder.KojiModuleBuilder import (

+         koji_retrying_multicall_map as multicall,)

+ 

      return multicall(*args, **kwargs)

  

  

@@ -109,7 +111,7 @@ 

          return "<KojiContentGenerator module: %s>" % (self.module_name)

  

      @staticmethod

-     def parse_rpm_output(output, tags, separator=';'):

+     def parse_rpm_output(output, tags, separator=";"):

          """

          Copied from:

          https://github.com/projectatomic/atomic-reactor/blob/master/atomic_reactor/plugins/exit_koji_promote.py

@@ -130,42 +132,42 @@ 

              except ValueError:

                  return None

  

-             if value == '(none)':

+             if value == "(none)":

                  return None

  

              return value

  

          components = []

-         sigmarker = 'Key ID '

+         sigmarker = "Key ID "

          for rpm in output:

-             fields = rpm.rstrip('\n').split(separator)

+             fields = rpm.rstrip("\n").split(separator)

              if len(fields) < len(tags):

                  continue

  

-             signature = field('SIGPGP:pgpsig') or field('SIGGPG:pgpsig')

+             signature = field("SIGPGP:pgpsig") or field("SIGGPG:pgpsig")

              if signature:

                  parts = signature.split(sigmarker, 1)

                  if len(parts) > 1:

                      signature = parts[1]

  

              component_rpm = {

-                 u'type': u'rpm',

-                 u'name': field('NAME'),

-                 u'version': field('VERSION'),

-                 u'release': field('RELEASE'),

-                 u'arch': field('ARCH'),

-                 u'sigmd5': field('SIGMD5'),

-                 u'signature': signature,

+                 u"type": u"rpm",

+                 u"name": field("NAME"),

+                 u"version": field("VERSION"),

+                 u"release": field("RELEASE"),

+                 u"arch": field("ARCH"),

+                 u"sigmd5": field("SIGMD5"),

+                 u"signature": signature,

              }

  

              # Special handling for epoch as it must be an integer or None

-             epoch = field('EPOCH')

+             epoch = field("EPOCH")

              if epoch is not None:

                  epoch = int(epoch)

  

-             component_rpm[u'epoch'] = epoch

+             component_rpm[u"epoch"] = epoch

  

-             if component_rpm['name'] != 'gpg-pubkey':

+             if component_rpm["name"] != "gpg-pubkey":

                  components.append(component_rpm)

  

          return components

@@ -177,28 +179,25 @@ 

  

          Build a list of installed RPMs in the format required for the

          metadata.

-         """ # noqa

+         """  # noqa

  

          tags = [

-             'NAME',

-             'VERSION',

-             'RELEASE',

-             'ARCH',

-             'EPOCH',

-             'SIGMD5',

-             'SIGPGP:pgpsig',

-             'SIGGPG:pgpsig',

+             "NAME",

+             "VERSION",

+             "RELEASE",

+             "ARCH",

+             "EPOCH",

+             "SIGMD5",

+             "SIGPGP:pgpsig",

+             "SIGGPG:pgpsig",

          ]

  

-         sep = ';'

+         sep = ";"

          fmt = sep.join(["%%{%s}" % tag for tag in tags])

          cmd = "/bin/rpm -qa --qf '{0}\n'".format(fmt)

-         with open('/dev/null', 'r+') as devnull:

-             p = subprocess.Popen(cmd,

-                                  shell=True,

-                                  stdin=devnull,

-                                  stdout=subprocess.PIPE,

-                                  stderr=devnull)

+         with open("/dev/null", "r+") as devnull:

+             p = subprocess.Popen(

+                 cmd, shell=True, stdin=devnull, stdout=subprocess.PIPE, stderr=devnull)

  

              (stdout, stderr) = p.communicate()

              status = p.wait()

@@ -216,16 +215,12 @@ 

          # TODO: In libmodulemd v1.5, there'll be a property we can check instead

          # of using RPM

          try:

-             libmodulemd_version = subprocess.check_output(

-                 ['rpm', '--queryformat', '%{VERSION}', '-q', 'libmodulemd'],

-                 universal_newlines=True).strip()

+             cmd = ["rpm", "--queryformat", "%{VERSION}", "-q", "libmodulemd"]

+             libmodulemd_version = subprocess.check_output(cmd, universal_newlines=True).strip()

          except subprocess.CalledProcessError:

-             libmodulemd_version = 'unknown'

+             libmodulemd_version = "unknown"

  

-         return [{

-             'name': 'libmodulemd',

-             'version': libmodulemd_version

-         }]

+         return [{"name": "libmodulemd", "version": libmodulemd_version}]

  

      def _koji_rpms_in_tag(self, tag):

          """ Return the list of koji rpms in a tag. """

@@ -257,17 +252,20 @@ 

          # Prepare the arguments for Koji multicall.

          # We will call session.getRPMHeaders(...) for each SRC RPM to get exclusivearch,

          # excludearch and license headers.

-         multicall_kwargs = [{"rpmID": rpm_id,

-                              "headers": ["exclusivearch", "excludearch", "license"]}

-                             for rpm_id in src_rpms.keys()]

+         multicall_kwargs = [

+             {"rpmID": rpm_id, "headers": ["exclusivearch", "excludearch", "license"]}

+             for rpm_id in src_rpms.keys()

+         ]

          # For each binary RPM, we only care about the "license" header.

-         multicall_kwargs += [{"rpmID": rpm_id, "headers": ["license"]}

-                              for rpm_id in binary_rpms.keys()]

+         multicall_kwargs += [

+             {"rpmID": rpm_id, "headers": ["license"]} for rpm_id in binary_rpms.keys()

+         ]

          rpms_headers = koji_retrying_multicall_map(

-             session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs)

+             session, session.getRPMHeaders, list_of_kwargs=multicall_kwargs

+         )

  

          # Temporary dict with build_id as a key to find builds easily.

-         builds = {build['build_id']: build for build in builds}

+         builds = {build["build_id"]: build for build in builds}

  

          # Create a mapping of build IDs to SRPM NEVRAs so that the for loop below can directly

          # access these values when adding the `srpm_nevra` key to the returned RPMs

@@ -280,8 +278,7 @@ 

          # also other useful data from the Build associated with the RPM.

          for rpm, headers in zip(chain(src_rpms.values(), binary_rpms.values()), rpms_headers):

              if not headers:

-                 raise RuntimeError(

-                     "No RPM headers received from Koji for RPM %s" % rpm["name"])

+                 raise RuntimeError("No RPM headers received from Koji for RPM %s" % rpm["name"])

              if "license" not in headers:

                  raise RuntimeError(

                      "No RPM 'license' header received from Koji for RPM %s" % rpm["name"])

@@ -291,44 +288,42 @@ 

                  build["excludearch"] = headers["excludearch"]

  

              rpm["license"] = headers["license"]

-             rpm['srpm_name'] = build['name']

-             rpm['srpm_nevra'] = build_id_to_srpm_nevra[rpm["build_id"]]

-             rpm['exclusivearch'] = build['exclusivearch']

-             rpm['excludearch'] = build['excludearch']

+             rpm["srpm_name"] = build["name"]

+             rpm["srpm_nevra"] = build_id_to_srpm_nevra[rpm["build_id"]]

+             rpm["exclusivearch"] = build["exclusivearch"]

+             rpm["excludearch"] = build["excludearch"]

  

          return rpms

  

      def _get_build(self):

          ret = {}

-         ret[u'name'] = self.module.name

+         ret[u"name"] = self.module.name

          if self.devel:

-             ret['name'] += "-devel"

-         ret[u'version'] = self.module.stream.replace("-", "_")

+             ret["name"] += "-devel"

+         ret[u"version"] = self.module.stream.replace("-", "_")

          # Append the context to the version to make NVRs of modules unique in the event of

          # module stream expansion

-         ret[u'release'] = '{0}.{1}'.format(self.module.version, self.module.context)

-         ret[u'source'] = self.module.scmurl

-         ret[u'start_time'] = calendar.timegm(

-             self.module.time_submitted.utctimetuple())

-         ret[u'end_time'] = calendar.timegm(

-             self.module.time_completed.utctimetuple())

-         ret[u'extra'] = {

+         ret[u"release"] = "{0}.{1}".format(self.module.version, self.module.context)

+         ret[u"source"] = self.module.scmurl

+         ret[u"start_time"] = calendar.timegm(self.module.time_submitted.utctimetuple())

+         ret[u"end_time"] = calendar.timegm(self.module.time_completed.utctimetuple())

+         ret[u"extra"] = {

              u"typeinfo": {

                  u"module": {

                      u"module_build_service_id": self.module.id,

                      u"content_koji_tag": self.module.koji_tag,

                      u"modulemd_str": self.module.modulemd,

-                     u"name": ret['name'],

+                     u"name": ret["name"],

                      u"stream": self.module.stream,

                      u"version": self.module.version,

-                     u"context": self.module.context

+                     u"context": self.module.context,

                  }

              }

          }

          session = get_session(self.config, login=False)

          # Only add the CG build owner if the user exists in Koji

          if session.getUser(self.owner):

-             ret[u'owner'] = self.owner

+             ret[u"owner"] = self.owner

          return ret

  

      def _get_buildroot(self):

@@ -338,18 +333,15 @@ 

              u"id": 1,

              u"host": {

                  u"arch": text_type(platform.machine()),

-                 u'os': u"%s %s" % (distro[0], distro[1])

+                 u"os": u"%s %s" % (distro[0], distro[1]),

              },

              u"content_generator": {

                  u"name": u"module-build-service",

-                 u"version": text_type(version)

-             },

-             u"container": {

-                 u"arch": text_type(platform.machine()),

-                 u"type": u"none"

+                 u"version": text_type(version),

              },

+             u"container": {u"arch": text_type(platform.machine()), u"type": u"none"},

              u"components": self.__get_rpms(),

-             u"tools": self.__get_tools()

+             u"tools": self.__get_tools(),

          }

          return ret

  

@@ -368,7 +360,7 @@ 

              u"arch": rpm["arch"],

              u"epoch": rpm["epoch"],

              u"sigmd5": rpm["payloadhash"],

-             u"type": u"rpm"

+             u"type": u"rpm",

          }

  

      def _get_arch_mmd_output(self, output_path, arch):

@@ -385,15 +377,11 @@ 

          :return: Dictionary with record in "output" list.

          """

          ret = {

-             'buildroot_id': 1,

-             'arch': arch,

-             'type': 'file',

-             'extra': {

-                 'typeinfo': {

-                     'module': {}

-                 }

-             },

-             'checksum_type': 'md5',

+             "buildroot_id": 1,

+             "arch": arch,

+             "type": "file",

+             "extra": {"typeinfo": {"module": {}}},

+             "checksum_type": "md5",

          }

  

          # Noarch architecture represents "generic" modulemd.txt.

@@ -406,13 +394,13 @@ 

          # parse it to get the Modulemd instance.

          mmd_path = os.path.join(output_path, mmd_filename)

          try:

-             with open(mmd_path, 'rb') as mmd_f:

+             with open(mmd_path, "rb") as mmd_f:

                  raw_data = mmd_f.read()

                  data = to_text_type(raw_data)

                  mmd = load_mmd(data)

-                 ret['filename'] = mmd_filename

-                 ret['filesize'] = len(raw_data)

-                 ret['checksum'] = hashlib.md5(raw_data).hexdigest()

+                 ret["filename"] = mmd_filename

+                 ret["filesize"] = len(raw_data)

+                 ret["checksum"] = hashlib.md5(raw_data).hexdigest()

          except IOError:

              if arch == "src":

                  # This might happen in case the Module is submitted directly

@@ -428,8 +416,7 @@ 

          if arch in ["noarch", "src"]:

              # For generic noarch/src modulemd, include all the RPMs.

              for rpm in self.rpms:

-                 components.append(

-                     self._koji_rpm_to_component_record(rpm))

+                 components.append(self._koji_rpm_to_component_record(rpm))

          else:

              # Check the RPM artifacts built for this architecture in modulemd file,

              # find the matching RPM in the `rpms_dict` coming from Koji and use it

@@ -438,11 +425,10 @@ 

              # RPM sigmd5 signature is not stored in MMD.

              for rpm in mmd.get_rpm_artifacts().get():

                  if rpm not in self.rpms_dict:

-                     raise RuntimeError("RPM %s found in the final modulemd but not "

-                                        "in Koji tag." % rpm)

+                     raise RuntimeError(

+                         "RPM %s found in the final modulemd but not in Koji tag." % rpm)

                  tag_rpm = self.rpms_dict[rpm]

-                 components.append(

-                     self._koji_rpm_to_component_record(tag_rpm))

+                 components.append(self._koji_rpm_to_component_record(tag_rpm))

          ret["components"] = components

          return ret

  

@@ -455,18 +441,18 @@ 

  

          try:

              log_path = os.path.join(output_path, "build.log")

-             with open(log_path, 'rb') as build_log:

+             with open(log_path, "rb") as build_log:

                  checksum = hashlib.md5(build_log.read()).hexdigest()

              stat = os.stat(log_path)

              ret.append(

                  {

-                     u'buildroot_id': 1,

-                     u'arch': u'noarch',

-                     u'type': u'log',

-                     u'filename': u'build.log',

-                     u'filesize': stat.st_size,

-                     u'checksum_type': u'md5',

-                     u'checksum': checksum

+                     u"buildroot_id": 1,

+                     u"arch": u"noarch",

+                     u"type": u"log",

+                     u"filename": u"build.log",

+                     u"filesize": stat.st_size,

+                     u"checksum_type": u"md5",

+                     u"checksum": checksum,

                  }

              )

          except IOError:

@@ -480,7 +466,7 @@ 

              u"metadata_version": 0,

              u"buildroots": [self._get_buildroot()],

              u"build": self._get_build(),

-             u"output": self._get_output(output_path)

+             u"output": self._get_output(output_path),

          }

  

          return ret

@@ -567,12 +553,10 @@ 

          # For example:

          #   "x86_64" -> ['athlon', 'i386', 'i586', 'i486', 'i686']

          #   "i686" -> []

-         multilib_arches = set(compatible_arches) - set(

-             pungi.arch.get_compatible_arches(arch))

+         multilib_arches = set(compatible_arches) - set(pungi.arch.get_compatible_arches(arch))

          # List of architectures that should be in ExclusiveArch tag or missing

          # from ExcludeArch tag. Multilib should not be enabled here.

-         exclusive_arches = pungi.arch.get_valid_arches(

-             arch, multilib=False, add_noarch=False)

+         exclusive_arches = pungi.arch.get_valid_arches(arch, multilib=False, add_noarch=False)

  

          # Modulemd.SimpleSet into which we will add the RPMs.

          rpm_artifacts = Modulemd.SimpleSet()

@@ -605,8 +589,7 @@ 

              # - the architecture of an RPM is not multilib architecture for `arch`.

              # - the architecture of an RPM is not the final mmd architecture.

              # - the architecture of an RPM is not "noarch" or "src".

-             if (rpm["arch"] not in multilib_arches and

-                     rpm["arch"] not in [arch, "noarch", "src"]):

+             if rpm["arch"] not in multilib_arches and rpm["arch"] not in [arch, "noarch", "src"]:

                  continue

  

              # Skip the RPM if it is excluded on this arch or exclusive

@@ -728,8 +711,7 @@ 

          commit = xmd.get("mbs", {}).get("commit")

          scmurl = xmd.get("mbs", {}).get("scmurl")

          if not commit or not scmurl:

-             log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.",

-                         self.module)

+             log.warning("%r: xmd['mbs'] does not contain 'commit' or 'scmurl'.", self.module)

              return

  

          td = None

@@ -747,9 +729,7 @@ 

                  if td is not None:

                      shutil.rmtree(td)

              except Exception as e:

-                 log.warning(

-                     "Failed to remove temporary directory {!r}: {}".format(

-                         td, str(e)))

+                 log.warning("Failed to remove temporary directory {!r}: {}".format(td, str(e)))

  

      def _prepare_file_directory(self):

          """ Creates a temporary directory that will contain all the files

@@ -787,10 +767,10 @@ 

          Uploads output files to Koji hub.

          """

          to_upload = []

-         for info in metadata['output']:

-             if info.get('metadata_only', False):

+         for info in metadata["output"]:

+             if info.get("metadata_only", False):

                  continue

-             localpath = os.path.join(file_dir, info['filename'])

+             localpath = os.path.join(file_dir, info["filename"])

              if not os.path.exists(localpath):

                  err = "Cannot upload %s to Koji. No such file." % localpath

                  log.error(err)

@@ -799,7 +779,7 @@ 

              to_upload.append([localpath, info])

  

          # Create unique server directory.

-         serverdir = 'mbs/%r.%d' % (time.time(), self.module.id)

+         serverdir = "mbs/%r.%d" % (time.time(), self.module.id)

  

          for localpath, info in to_upload:

              log.info("Uploading %s to Koji" % localpath)

@@ -816,8 +796,8 @@ 

  

          tag_name = self.module.cg_build_koji_tag

          if not tag_name:

-             log.info("%r: Not tagging Content Generator build, no "

-                      "cg_build_koji_tag set", self.module)

+             log.info(

+                 "%r: Not tagging Content Generator build, no cg_build_koji_tag set", self.module)

              return

  

          tag_names_to_try = [tag_name, self.config.koji_cg_default_build_tag]

@@ -827,20 +807,19 @@ 

              if tag_info:

                  break

  

-             log.info("%r: Tag %s not found in Koji, trying next one.",

-                      self.module, tag)

+             log.info("%r: Tag %s not found in Koji, trying next one.", self.module, tag)

  

          if not tag_info:

              log.warning(

-                 "%r:, Not tagging Content Generator build, no available tag"

-                 " found, tried %r", self.module, tag_names_to_try)

+                 "%r:, Not tagging Content Generator build, no available tag found, tried %r",

+                 self.module, tag_names_to_try,

+             )

              return

  

          build = self._get_build()

          nvr = "%s-%s-%s" % (build["name"], build["version"], build["release"])

  

-         log.info("Content generator build %s will be tagged as %s in "

-                  "Koji", nvr, tag)

+         log.info("Content generator build %s will be tagged as %s in Koji", nvr, tag)

          session.tagBuild(tag_info["id"], nvr)

  

      def _load_koji_tag(self, koji_session):

@@ -879,7 +858,7 @@ 

              except koji.GenericError as e:

                  if "Build already exists" not in str(e):

                      raise

-                 log.warning('Failed to import content generator')

+                 log.warning("Failed to import content generator")

                  build_info = None

              if conf.koji_cg_tag_build:

                  self._tag_cg_build()

file modified

@@ -40,7 +40,7 @@ 

  import munch

  from itertools import chain

  from OpenSSL.SSL import SysCallError

- 

+ import textwrap

  

  from module_build_service import log, conf, models

  import module_build_service.scm

@@ -76,8 +76,10 @@ 

      if list_of_args is None and list_of_kwargs is None:

          raise ProgrammingError("One of list_of_args or list_of_kwargs must be set.")

  

-     if (type(list_of_args) not in [type(None), list] or

-             type(list_of_kwargs) not in [type(None), list]):

+     if (

+         type(list_of_args) not in [type(None), list]

+         or type(list_of_kwargs) not in [type(None), list]

+     ):

          raise ProgrammingError("list_of_args and list_of_kwargs must be list or None.")

  

      if list_of_kwargs is None:

@@ -99,16 +101,19 @@ 

      try:

          responses = koji_session.multiCall(strict=True)

      except Exception:

-         log.exception("Exception raised for multicall of method %r with args %r, %r:",

-                       koji_session_fnc, args, kwargs)

+         log.exception(

+             "Exception raised for multicall of method %r with args %r, %r:",

+             koji_session_fnc, args, kwargs,

+         )

          return None

  

      if not responses:

          log.error("Koji did not return response for multicall of %r", koji_session_fnc)

          return None

      if type(responses) != list:

-         log.error("Fault element was returned for multicall of method %r: %r",

-                   koji_session_fnc, responses)

+         log.error(

+             "Fault element was returned for multicall of method %r: %r", koji_session_fnc, responses

+         )

          return None

  

      results = []

@@ -122,13 +127,17 @@ 

      for response, args, kwargs in zip(responses, list_of_args, list_of_kwargs):

          if type(response) == list:

              if not response:

-                 log.error("Empty list returned for multicall of method %r with args %r, %r",

-                           koji_session_fnc, args, kwargs)

+                 log.error(

+                     "Empty list returned for multicall of method %r with args %r, %r",

+                     koji_session_fnc, args, kwargs

+                 )

                  return None

              results.append(response[0])

          else:

-             log.error("Unexpected data returned for multicall of method %r with args %r, %r: %r",

-                       koji_session_fnc, args, kwargs, response)

+             log.error(

+                 "Unexpected data returned for multicall of method %r with args %r, %r: %r",

+                 koji_session_fnc, args, kwargs, response

+             )

              return None

  

      return results

@@ -150,9 +159,9 @@ 

  

      backend = "koji"

      _build_lock = threading.Lock()

-     region = dogpile.cache.make_region().configure('dogpile.cache.memory')

+     region = dogpile.cache.make_region().configure("dogpile.cache.memory")

  

-     @module_build_service.utils.validate_koji_tag('tag_name')

+     @module_build_service.utils.validate_koji_tag("tag_name")

      def __init__(self, owner, module, config, tag_name, components):

          """

          :param owner: a string representing who kicked off the builds

@@ -186,12 +195,11 @@ 

          self.components = components

  

      def __repr__(self):

-         return "<KojiModuleBuilder module: %s, tag: %s>" % (

-             self.module_str, self.tag_name)

+         return "<KojiModuleBuilder module: %s, tag: %s>" % (self.module_str, self.tag_name)

  

      @region.cache_on_arguments()

      def getPerms(self):

-         return dict([(p['name'], p['id']) for p in self.koji_session.getAllPerms()])

+         return dict([(p["name"], p["id"]) for p in self.koji_session.getAllPerms()])

  

      @module_build_service.utils.retry(wait_on=(IOError, koji.GenericError))

      def buildroot_ready(self, artifacts=None):

@@ -201,24 +209,22 @@ 

          """

          assert self.module_target, "Invalid build target"

  

-         tag_id = self.module_target['build_tag']

+         tag_id = self.module_target["build_tag"]

          repo = self.koji_session.getRepo(tag_id)

          builds = [self.koji_session.getBuild(a, strict=True) for a in artifacts or []]

-         log.info("%r checking buildroot readiness for "

-                  "repo: %r, tag_id: %r, artifacts: %r, builds: %r" % (

-                      self, repo, tag_id, artifacts, builds))

+         log.info(

+             "%r checking buildroot readiness for repo: %r, tag_id: %r, artifacts: %r, builds: %r"

+             % (self, repo, tag_id, artifacts, builds)

+         )

  

          if not repo:

              log.info("Repo is not generated yet, buildroot is not ready yet.")

              return False

  

-         ready = bool(koji.util.checkForBuilds(

-             self.koji_session,

-             tag_id,

-             builds,

-             repo['create_event'],

-             latest=True,

-         ))

+         ready = bool(

+             koji.util.checkForBuilds(

+                 self.koji_session, tag_id, builds, repo["create_event"], latest=True)

+         )

          if ready:

              log.info("%r buildroot is ready" % self)

          else:

@@ -239,19 +245,22 @@ 

              # Get all the RPMs and builds of the reusable module in Koji

              rpms, builds = koji_session.listTaggedRPMS(reusable_module.koji_tag, latest=True)

              # Convert the list to a dict where each key is the build_id

-             builds = {build['build_id']: build for build in builds}

+             builds = {build["build_id"]: build for build in builds}

              # Create a mapping of package (SRPM) to the RPMs in NVR format

              package_to_rpms = {}

              for rpm in rpms:

-                 package = builds[rpm['build_id']]['name']

+                 package = builds[rpm["build_id"]]["name"]

                  if package not in package_to_rpms:

                      package_to_rpms[package] = []

                  package_to_rpms[package].append(kobo.rpmlib.make_nvr(rpm))

  

              components_in_module = [c.package for c in module_build.component_builds]
              reusable_components = get_reusable_components(
-                 db_session, module_build, components_in_module,
-                 previous_module_build=reusable_module)
+                 db_session,
+                 module_build,
+                 components_in_module,
+                 previous_module_build=reusable_module,
+             )
              # Loop through all the reusable components to find if any of their RPMs are
              # being filtered
              for reusable_component in reusable_components:
@@ -261,7 +270,7 @@ 
                  # We must get the component name from the NVR and not from
                  # reusable_component.package because macros such as those used
                  # by SCLs can change the name of the underlying build
-                 component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)['name']
+                 component_name = kobo.rpmlib.parse_nvr(reusable_component.nvr)["name"]
  
                  if component_name not in package_to_rpms:
                      continue
@@ -270,13 +279,13 @@ 
                  for nvr in package_to_rpms[component_name]:
                      parsed_nvr = kobo.rpmlib.parse_nvr(nvr)
                      # Don't compare with the epoch
-                     parsed_nvr['epoch'] = None
+                     parsed_nvr["epoch"] = None
                      # Loop through all the filtered RPMs to find a match with the reusable
                      # component's RPMs.
                      for nvr2 in list(filtered_rpms):
                          parsed_nvr2 = kobo.rpmlib.parse_nvr(nvr2)
                          # Don't compare with the epoch
-                         parsed_nvr2['epoch'] = None
+                         parsed_nvr2["epoch"] = None
                          # Only remove the filter if we are going to reuse a component with
                          # the same exact NVR
                          if parsed_nvr == parsed_nvr2:
@@ -299,10 +308,10 @@ 
          # Taken from Karsten's create-distmacro-pkg.sh
          # - however removed any provides to system-release/redhat-release
  
-         name = 'module-build-macros'
+         name = "module-build-macros"
          version = "0.1"
          release = "1"
-         today = datetime.date.today().strftime('%a %b %d %Y')
+         today = datetime.date.today().strftime("%a %b %d %Y")
          mmd = module_build.mmd()
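The `strftime("%a %b %d %Y")` call above produces the fixed date style RPM expects in `%changelog` entries, for example:

    >>> import datetime
    >>> datetime.date(2019, 4, 1).strftime("%a %b %d %Y")
    'Mon Apr 01 2019'

(Note that `%a` and `%b` are locale-dependent, so a non-English locale could in principle produce a changelog date that rpmbuild rejects.)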

  

          # Generate "Conflicts: name = version-release". This is workaround for
@@ -320,99 +329,107 @@ 
                      module_build, req_data["filtered_rpms"])
              else:
                  filtered_rpms = req_data["filtered_rpms"]
-             filter_conflicts.extend(map(
-                 KojiModuleBuilder.format_conflicts_line, filtered_rpms))
+             filter_conflicts.extend(map(KojiModuleBuilder.format_conflicts_line, filtered_rpms))
  
-             if req_name in conf.base_module_names and 'ursine_rpms' in req_data:
+             if req_name in conf.base_module_names and "ursine_rpms" in req_data:
                  comments = (
-                     '# Filter out RPMs from stream collision modules found from ursine content'
-                     ' for base module {}:'.format(req_name),
-                     '# ' + ', '.join(req_data['stream_collision_modules']),
+                     ("# Filter out RPMs from stream collision modules found from ursine content"
+                      " for base module {}:".format(req_name)),
+                     "# " + ", ".join(req_data["stream_collision_modules"]),
+                 )
+                 filter_conflicts.extend(
+                     chain(
+                         comments,
+                         map(KojiModuleBuilder.format_conflicts_line, req_data["ursine_rpms"]),
+                     )
                  )
-                 filter_conflicts.extend(chain(
-                     comments,
-                     map(KojiModuleBuilder.format_conflicts_line, req_data['ursine_rpms'])
-                 ))
  
-         spec_content = """
- %global dist {disttag}
- %global modularitylabel {module_name}:{module_stream}:{module_version}:{module_context}
- %global _module_name {module_name}
- %global _module_stream {module_stream}
- %global _module_version {module_version}
- %global _module_context {module_context}
+         spec_content = textwrap.dedent("""
+             %global dist {disttag}
+             %global modularitylabel {module_name}:{module_stream}:{module_version}:{module_context}
+             %global _module_name {module_name}
+             %global _module_stream {module_stream}
+             %global _module_version {module_version}
+             %global _module_context {module_context}
  
- Name:       {name}
- Version:    {version}
- Release:    {release}%dist
- Summary:    Package containing macros required to build generic module
- BuildArch:  noarch
+             Name:       {name}
+             Version:    {version}
+             Release:    {release}%dist
+             Summary:    Package containing macros required to build generic module
+             BuildArch:  noarch
  
- Group:      System Environment/Base
- License:    MIT
- URL:        http://fedoraproject.org
+             Group:      System Environment/Base
+             License:    MIT
+             URL:        http://fedoraproject.org
  
- Source1:    macros.modules
+             Source1:    macros.modules
  
- {filter_conflicts}
+             {filter_conflicts}
  
- %description
- This package is used for building modules with a different dist tag.
- It provides a file /usr/lib/rpm/macros.d/macro.modules and gets read
- after macro.dist, thus overwriting macros of macro.dist like %%dist
- It should NEVER be installed on any system as it will really mess up
-  updates, builds, ....
+             %description
+             This package is used for building modules with a different dist tag.
+             It provides a file /usr/lib/rpm/macros.d/macro.modules and gets read
+             after macro.dist, thus overwriting macros of macro.dist like %%dist
+             It should NEVER be installed on any system as it will really mess up
+             updates, builds, ....
  
  
- %build
+             %build
  
- %install
- mkdir -p %buildroot/etc/rpm 2>/dev/null |:
- cp %SOURCE1 %buildroot/etc/rpm/macros.zz-modules
- chmod 644 %buildroot/etc/rpm/macros.zz-modules
+             %install
+             mkdir -p %buildroot/etc/rpm 2>/dev/null |:
+             cp %SOURCE1 %buildroot/etc/rpm/macros.zz-modules
+             chmod 644 %buildroot/etc/rpm/macros.zz-modules
  
  
- %files
- /etc/rpm/macros.zz-modules
+             %files
+             /etc/rpm/macros.zz-modules
  
  
  
- %changelog
- * {today} Fedora-Modularity - {version}-{release}{disttag}
- - autogenerated macro by Module Build Service (MBS)
- """.format(disttag=disttag, today=today, name=name, version=version,
-            release=release,
-            module_name=module_build.name,
-            module_stream=module_build.stream,
-            module_version=module_build.version,
-            module_context=module_build.context,
-            filter_conflicts='\n'.join(filter_conflicts))
+             %changelog
+             * {today} Fedora-Modularity - {version}-{release}{disttag}
+             - autogenerated macro by Module Build Service (MBS)
+         """).format(
+             disttag=disttag,
+             today=today,
+             name=name,
+             version=version,
+             release=release,
+             module_name=module_build.name,
+             module_stream=module_build.stream,
+             module_version=module_build.version,
+             module_context=module_build.context,
+             filter_conflicts="\n".join(filter_conflicts),
+         )
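The switch to `textwrap.dedent` is what allows the spec template above to be indented along with the surrounding code: `dedent` removes the longest whitespace prefix common to all non-blank lines, and the newline right after the opening `"""` keeps the first template line aligned with the rest. A small demonstration of the idiom:

    import textwrap

    template = textwrap.dedent("""
        Name:       {name}
        Version:    {version}
    """).format(name="module-build-macros", version="0.1")
    # Both lines now start at column 0, as rpmbuild expects.
    print(template)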

  

          modulemd_macros = ""
          rpm_buildopts = mmd.get_rpm_buildopts()
          if rpm_buildopts:
-             modulemd_macros = rpm_buildopts.get('macros')
- 
-         macros_content = """
- 
- # General macros set by MBS
- 
- %dist {disttag}
- %modularitylabel {module_name}:{module_stream}:{module_version}:{module_context}
- %_module_build 1
- %_module_name {module_name}
- %_module_stream {module_stream}
- %_module_version {module_version}
- %_module_context {module_context}
- 
- # Macros set by module author:
- 
- {modulemd_macros}
- """.format(disttag=disttag, module_name=module_build.name,
-            module_stream=module_build.stream,
-            module_version=module_build.version,
-            module_context=module_build.context,
-            modulemd_macros=modulemd_macros)
+             modulemd_macros = rpm_buildopts.get("macros")
+ 
+         macros_content = textwrap.dedent("""
+             # General macros set by MBS
+ 
+             %dist {disttag}
+             %modularitylabel {module_name}:{module_stream}:{module_version}:{module_context}
+             %_module_build 1
+             %_module_name {module_name}
+             %_module_stream {module_stream}
+             %_module_version {module_version}
+             %_module_context {module_context}
+ 
+             # Macros set by module author:
+ 
+             {modulemd_macros}
+         """).format(
+             disttag=disttag,
+             module_name=module_build.name,
+             module_stream=module_build.stream,
+             module_version=module_build.version,
+             module_context=module_build.context,
+             modulemd_macros=modulemd_macros,
+         )
  
          td = tempfile.mkdtemp(prefix="module_build_service-build-macros")
          fd = open(os.path.join(td, "%s.spec" % name), "w")
@@ -426,11 +443,20 @@ 
          log.debug("Building %s.spec" % name)
  
          # We are not interested in the rpmbuild stdout...
-         null_fd = open(os.devnull, 'w')
-         execute_cmd(['rpmbuild', '-bs', '%s.spec' % name,
-                      '--define', '_topdir %s' % td,
-                      '--define', '_sourcedir %s' % sources_dir],
-                     cwd=td, stdout=null_fd)