#91 Remove blocking call to sqlite by using aiosqlite module.
Merged 2 months ago by pingou. Opened 2 months ago by cverna.
cverna/mdapi python3_friendly  into  master

file modified
+1

@@ -10,3 +10,4 @@ 

  alembic.ini

  .tox/

  .pytest_cache/

+ venv/

file modified
+6 -4

@@ -1,14 +1,16 @@ 

  # This Dockerfile is used to build the mdapi service on Openshift

  # mdapi.cfg configuration is managed by Openshift as a configmap

- FROM registry.fedoraproject.org/fedora:latest

+ FROM fedora:31

  

  LABEL maintainer "Clément Verna <cverna@fedoraproject.org>"

  

  EXPOSE 8080

  

- RUN dnf -y install python3-aiohttp python3-werkzeug python3-requests python3-sqlalchemy python3-fedora-messaging

+ RUN dnf -y install python3-aiohttp python3-werkzeug python3-requests python3-fedora-messaging python3-uvloop python3-pip python3-gunicorn \

+     && dnf clean all \

+     && pip3 install aiosqlite

  

- USER 1001

  ENV MDAPI_CONFIG=/etc/mdapi/mdapi.cfg

  COPY . /code

- ENTRYPOINT ["/code/mdapi-run"]

+ WORKDIR /code

+ ENTRYPOINT ["gunicorn", "mdapi.server:init_app", "--bind", "0.0.0.0:8080", "--worker-class", "aiohttp.GunicornUVLoopWebWorker", "-w", "2"]

file modified
+49 -55

@@ -37,7 +37,6 @@ 

  import argparse

  import contextlib

  import itertools

- import multiprocessing

  import os

  import shutil

  import tempfile

@@ -45,15 +44,14 @@ 

  import hashlib

  import xml.etree.ElementTree as ET

  import sys

+ import sqlite3

  

  

  import requests

  

- from sqlalchemy import text

  from fedora_messaging.api import Message, publish

  from fedora_messaging.exceptions import PublishReturned, ConnectionException

  

- import mdapi.lib as mdapilib

  

  KOJI_REPO = 'https://kojipkgs.fedoraproject.org/repos/'

  PKGDB2_URL = 'https://admin.fedoraproject.org/pkgdb/'

@@ -153,15 +151,14 @@ 

      ''' Return the list of Fedora branches corresponding to the given

      status.

      '''

-     url = PKGDB2_URL + 'api/collections?clt_status=%s' % status

+     url = PKGDB2_URL + f'api/collections?clt_status={status}'

      response = requests.get(url, verify=PKGDB2_VERIFY)

      data = response.json()

      return data['collections']

  

  

  def download_db(name, repomd_url, archive):

-     print('%s Downloading file: %s to %s' % (

-         name.ljust(padding), repomd_url, archive))

+     print(f'{name.ljust(padding)} Downloading file: {repomd_url} to {archive}')

      response = requests.get(repomd_url, verify=DL_VERIFY)

      with open(archive, 'wb') as stream:

          stream.write(response.content)

@@ -169,7 +166,7 @@ 

  

  def decompress_db(name, archive, location):

      ''' Decompress the given XZ archive at the specified location. '''

-     print('%s Extracting %s to %s' % (name.ljust(padding), archive, location))

+     print(f'{name.ljust(padding)} Extracting {archive} to {location}')

      if archive.endswith('.xz'):

          import lzma

          with contextlib.closing(lzma.LZMAFile(archive)) as stream_xz:

@@ -196,14 +193,15 @@ 

  

  

  def compare_dbs(name, db1, db2, cache1, cache2):

-     print('%s Comparing %s and %s' % (name.ljust(padding), db1, db2))

+     print(f'{name.ljust(padding)} Comparing {db1} and {db2}')

  

      def get_table_names(uri):

-         with mdapilib.session_manager('sqlite:///' + uri) as session:

-             for name in session.connection().engine.table_names():

-                 if name == 'db_info':

-                     continue

-                 yield name

+         conn = sqlite3.connect(uri)

+         for name in conn.execute("SELECT name FROM sqlite_master WHERE type='table'"):

+             if name[0] == 'db_info':

+                 continue

+             yield name[0]

+         conn.close()

  

      def row_to_package(row):

          if '/' in row[0]:

@@ -213,30 +211,27 @@ 

          return name.split('(')[0]

  

      def get_all_rows(uri, table, cache):

-         query = text(queries.get(table, default_query).format(table=table))

-         with mdapilib.session_manager('sqlite:///' + uri) as session:

-             engine = session.connection().engine

-             for i, row in enumerate(engine.execute(query)):

-                 if table in cache_dependant_tables:

-                     row = list(row)  # lists support item assignment

-                     if row[0] in cache:

-                         row[0] = cache[row[0]]

-                         yield tuple(row)

-                     else:

-                         print("%s ! %r does not appear in the "

-                               "%r cache for %r.  Dropping "

-                               "from comparison." % (

-                                   name.ljust(padding), row[0], table, uri))

-                 else:

+         conn = sqlite3.connect(uri)

+         query = queries.get(table, default_query).format(table=table)

+         for i, row in enumerate(conn.execute(query)):

+             if table in cache_dependant_tables:

+                 row = list(row)  # lists support item assignment

+                 if row[0] in cache:

+                     row[0] = cache[row[0]]

                      yield tuple(row)

- 

+                 else:

+                     print(f"{name.ljust(padding)} ! {row[0]!r} does not appear in the "

+                           f"{table!r} cache for {uri}. Dropping from comparison.")

+             else:

+                 yield tuple(row)

+         conn.close()

  

      def build_cache(uri, cache):

-         query = text(packages_cache_builder.format(table=table))

-         with mdapilib.session_manager('sqlite:///' + uri) as session:

-             engine = session.connection().engine

-             for pkgId, pkgname in engine.execute(query):

-                 cache[pkgId] = pkgname

+         conn = sqlite3.connect(uri)

+         query = queries.get(table, default_query).format(table=table)

+         for pkgId, pkgname in conn.execute(query):

+             cache[pkgId] = pkgname

+         conn.close()

  

      tables1 = list(get_table_names(db1))

      tables2 = list(get_table_names(db2))

@@ -248,7 +243,7 @@ 

          # We have never downloaded this before...

          # so we have nothing to compare it against.  Just return and say there

          # are "no differences".

-         print('%s Empty!  %s  Cannot compare.' % (name.ljust(padding), db2))

+         print(f'{name.ljust(padding)} Empty! {db2} Cannot compare.')

          return set()

  

      assert len(tables1) == len(tables2), "Cannot compare disparate dbs."

@@ -270,6 +265,7 @@ 

          # Same goes for the 'packages' table in the 'other' db.

          ('other', 'packages'),

      ]

+ 

      def should_compare(table):

          for test, target in ignored_db_tables:

              if test in db1 and table == target:

@@ -293,11 +289,11 @@ 

  

  

  def publish_changes(name, packages, repomd_url):

-     print('%s Publishing differences to fedora messaging:' % (name.ljust(padding)))

+     print(f'{name.ljust(padding)} Publishing differences to fedora messaging:')

  

      change = bool(packages)

      if not change:

-         print('%s No real changes.  Skipping fedora messaging.' % (name.ljust(padding)))

+         print(f'{name.ljust(padding)} No real changes.  Skipping fedora messaging.')

          return

  

      # Just publish the suffix of the URL.  The prefix is dl.fedoraproject.org

@@ -306,7 +302,7 @@ 

      # download.fedoraproject.org.. so, just obscure *exactly* which repo we're

      # talking about.

      url = '/'.join(repomd_url.split('/')[4:])

-     print("%s   url %s" % (name.ljust(padding), url))

+     print(f"{name.ljust(padding)} url {url}")

  

      try:

          msg = Message(

@@ -327,7 +323,7 @@ 

  

  

  def install_db(name, src, dest):

-     print('%s Installing %s to %s.' % (name.ljust(padding), src, dest))

+     print(f'{name.ljust(padding)} Installing {src} to {dest}.')

      shutil.move(src, dest)

  

  

@@ -365,8 +361,7 @@ 

      repomd_url = url + '/repomd.xml'

      response = requests.get(repomd_url, verify=DL_VERIFY)

      if not bool(response):

-         print('%s !! Failed to get %r %r' % (

-             name.ljust(padding), repomd_url, response))

+         print(f'{name.ljust(padding)} !! Failed to get {repomd_url!r} {response!r}')

          return

  

      # Parse the xml doc and get a list of locations and their shasum.

@@ -385,7 +380,7 @@ 

      files = ((f, s, t) for f, s, t in files if '.sqlite' in f)

  

      # We need to ensure the primary db comes first so we can build a pkey cache

-     primary_first = lambda item: not 'primary' in item[0]

+     primary_first = lambda item: 'primary' not in item[0]

      files = sorted(files, key=primary_first)

  

      # Primary-key caches built from the primary dbs so we can make sense

@@ -393,7 +388,7 @@ 

      cache1, cache2 = {}, {}

  

      if not files:

-         print('No sqlite database could be found in %s' % url)

+         print(f'No sqlite database could be found in {url}')

  

      for filename, shasum, shatype in files:

          repomd_url = url + '/' + filename

@@ -401,16 +396,16 @@ 

          # First, determine if the file has changed by comparing hash

          db = None

          if 'primary.sqlite' in filename:

-             db = 'mdapi-%s-primary.sqlite' % name

+             db = f'mdapi-{name}-primary.sqlite'

          elif 'filelists.sqlite' in filename:

-             db = 'mdapi-%s-filelists.sqlite' % name

+             db = f'mdapi-{name}-filelists.sqlite'

          elif 'other.sqlite' in filename:

-             db = 'mdapi-%s-other.sqlite' % name

+             db = f'mdapi-{name}-other.sqlite'

  

          # Have we downloaded this before?  Did it change?

          destfile = os.path.join(destfolder, db)

          if not needs_update(destfile, shasum, shatype):

-             print('%s No change of %s' % (name.ljust(padding), repomd_url))

+             print(f'{name.ljust(padding)} No change of {repomd_url}')

              continue

  

          # If it has changed, then download it and move it into place.

@@ -469,15 +464,14 @@ 

          version = release['version']

          if version == 'devel':

              version = 'rawhide'

-         url = '%s/pub/fedora/linux/' \

-             'development/%s/Everything/x86_64/os/repodata' % (DL_SERVER, version)

+         url = f'{DL_SERVER}/pub/fedora/linux/development/{version}/Everything/x86_64/os/repodata'

          print(release['koji_name'], version, release['status'], url)

          repositories.append(

              (url, release['koji_name'])

          )

  

          url = url.replace('/x86_64/os/', '/source/tree/')

-         repositories.append((url, 'src_%s' % release['koji_name']))

+         repositories.append((url, f'src_{release["koji_name"]}'))

  

      urls = {

          'Fedora':

@@ -509,18 +503,18 @@ 

                  url = url.replace('/x86_64/', '/Everything/x86_64/')

              else:

                  name = epel_repos[idx] % release['koji_name']

-             rurl =  url % (DL_SERVER, version)

+             rurl = url % (DL_SERVER, version)

              repositories.append((rurl, name))

  

              rurl = rurl.replace('/x86_64/os', '/source/tree')

-             repositories.append((rurl, 'src_%s' % name))

+             repositories.append((rurl, f'src_{name}'))

  

      # In parallel

-     #p = multiprocessing.Pool(10)

-     #p.map(process_repo, itertools.product(

+     # p = multiprocessing.Pool(10)

+     # p.map(process_repo, itertools.product(

      #    [CONFIG.get('DB_FOLDER', '/var/tmp')],

      #    repositories)

-     #)

+     # )

  

      # In serial

      sleep_for = CONFIG.get('CRON_SLEEP', 30)

file modified
+2 -4

@@ -1,5 +1,3 @@ 

- #!/usr/bin/env python3

+ #!/bin/bash

  

- from mdapi.server import main

- 

- main()

+ gunicorn mdapi.server:init_app --bind 0.0.0.0:8080 --worker-class aiohttp.GunicornUVLoopWebWorker

file modified
+98 -339

@@ -1,6 +1,6 @@ 

  # -*- coding: utf-8 -*-

  #

- # Copyright © 2015  Red Hat, Inc.

+ # Copyright © 2015-2019  Red Hat, Inc.

  #

  # This copyrighted material is made available to anyone wishing to use,

  # modify, copy, or redistribute it subject to the terms and conditions

@@ -22,16 +22,27 @@ 

  '''

  Top level of the mdapi aiohttp application.

  '''

- import functools

- import json

  import logging

  import os

  

- import asyncio

+ import aiosqlite

  import werkzeug

+ 

  from aiohttp import web

  

- import mdapi.lib as mdapilib

+ from mdapi.db import (

+         GET_PACKAGE,

+         GET_PACKAGE_INFO,

+         GET_CO_PACKAGE,

+         GET_PACKAGE_BY_SRC,

+         GET_PACKAGE_BY,

+         GET_FILES,

+         GET_CHANGELOGS,

+         Packages,

+         Dependencies,

+         FileList,

+         ChangeLog

+ )

  

  

  CONFIG = dict()

@@ -46,46 +57,11 @@ 

          exec(compile(

              config_file.read(), os.environ['MDAPI_CONFIG'], 'exec'), CONFIG)

  

- indexfile = os.path.join(

-     os.path.dirname(os.path.abspath(__file__)), 'index.html')

- INDEX = ''

- with open(indexfile) as stream:

-     INDEX = stream.read()

-     INDEX = INDEX.replace('$PREFIX', CONFIG.get('PREFIX', ''))

- 

  

  _log = logging.getLogger(__name__)

  

  

- def allows_jsonp(function):

-     ''' Add support for JSONP queries to the endpoint decorated. '''

- 

-     @functools.wraps(function)

-     def wrapper(request, *args, **kwargs):

-         ''' Actually does the job with the arguments provided.

- 

-         :arg request: the request that was called that we want to add JSONP

-         support to

-         :type request: aiohttp.web_request.Request

- 

-         '''

-         response = yield from function(request, *args, **kwargs)

-         url_arg = request.query

-         callback = url_arg.get('callback')

-         if callback and request.method == 'GET':

-             if isinstance(callback, list):

-                 callback = callback[0]

-             response.mimetype = 'application/javascript'

-             response.content_type = 'application/javascript'

-             response.text = '%s(%s);' % (callback, response.text)

- 

-         return response

- 

-     return wrapper

- 

- 

- @asyncio.coroutine

- def _get_pkg(branch, name=None, action=None, srcname=None):

+ async def _get_pkg(branch, name=None, action=None, srcname=None):

      ''' Return the pkg information for the given package in the specified

      branch or raise an aiohttp exception.

      '''

@@ -95,56 +71,45 @@ 

      pkg = None

      wrongdb = False

      for repotype in ['updates-testing', 'updates', 'testing', None]:

- 

-         if repotype:

-             dbfile = '%s/mdapi-%s-%s-primary.sqlite' % (

-                 CONFIG['DB_FOLDER'], branch, repotype)

-         else:

-             dbfile = '%s/mdapi-%s-primary.sqlite' % (

-                 CONFIG['DB_FOLDER'], branch)

+         dbfile = f'{CONFIG["DB_FOLDER"]}/mdapi-{branch}{"-"+repotype if repotype else ""}'\

+                  '-primary.sqlite'

  

          if not os.path.exists(dbfile):

              wrongdb = True

              continue

  

          wrongdb = False

- 

-         session = yield from mdapilib.create_session(

-             'sqlite:///%s' % dbfile)

-         if name:

+         async with aiosqlite.connect(f'{dbfile}') as db:

              if action:

-                 pkg = yield from mdapilib.get_package_by(

-                     session, action, name)

+                 # It is safe to format the query since the action does not come from the

+                 # user.

+                 query = GET_PACKAGE_BY.format(action)

+                 async with db.execute(query, (name,)) as cursor:

+                     pkg = await cursor.fetchall()

+                 if pkg:

+                     pkg = [Packages(*item) for item in pkg]

+                     break

+             elif srcname:

+                 async with db.execute(GET_PACKAGE_BY_SRC, (srcname+'%',)) as cursor:

+                     pkg = await cursor.fetchone()

+                 if pkg:

+                     pkg = Packages(*pkg)

+                     break

              else:

-                 pkg = yield from mdapilib.get_package(session, name)

-         elif srcname:

-             pkg = yield from mdapilib.get_package_by_src(session, srcname)

-         session.close()

-         if pkg:

-             break

- 

+                 async with db.execute(GET_PACKAGE, (name,)) as cursor:

+                     pkg = await cursor.fetchone()

+                 if pkg:

+                     pkg = Packages(*pkg)

+                     break

      if wrongdb:

          raise web.HTTPBadRequest()

  

      if not pkg:

          raise web.HTTPNotFound()

- 

      return (pkg, repotype)

  

  

- def _get_pretty(request):

-     pretty = False

-     params = request.query

-     if params.get('pretty') in ['1', 'true']:

-         pretty = True

-     # Assume pretty if html is requested and pretty is not disabled

-     elif 'text/html' in request.headers.get('ACCEPT', ''):

-         pretty = True

-     return pretty

- 

- 

- @asyncio.coroutine

- def _expand_pkg_info(pkgs, branch, repotype=None):

+ async def _expand_pkg_info(pkgs, branch, repotype=None):

      ''' Return a JSON blob containing all the information we want to return

      for the provided package or packages.

      '''

@@ -155,292 +120,86 @@ 

      output = []

      for pkg in pkgs:

          out = pkg.to_json()

-         dbfile = '%s/mdapi-%s%s-primary.sqlite' % (

-             CONFIG['DB_FOLDER'], branch, '-%s' % repotype if repotype else '')

- 

-         session = yield from mdapilib.create_session(

-             'sqlite:///%s' % dbfile)

-         # Fill in some extra info

- 

-         # Basic infos, always present regardless of the version of the repo

-         for datatype in ['conflicts', 'obsoletes', 'provides', 'requires']:

-             data = yield from mdapilib.get_package_info(

-                 session, pkg.pkgKey, datatype.capitalize())

-             if data:

-                 out[datatype] = [item.to_json() for item in data]

+         dbfile = f'{CONFIG["DB_FOLDER"]}/mdapi-{branch}{"-"+repotype if repotype else ""}'\

+                  '-primary.sqlite'

+ 

+         async with aiosqlite.connect(f'{dbfile}') as db:

+             # Fill in some extra info

+             # Basic infos, always present regardless of the version of the repo

+             for datatype in ['conflicts',

+                              'obsoletes',

+                              'provides',

+                              'requires',

+                              'enhances',

+                              'recommends',

+                              'suggests',

+                              'supplements']:

+                 # It is safe to format the query since the datatype does not come from the

+                 # user.

+                 query = GET_PACKAGE_INFO.format(datatype)

+                 async with db.execute(query, (pkg.pkgKey,)) as cursor:

+                     data = await cursor.fetchall()

+                 if data:

+                     out[datatype] = [Dependencies(*item).to_json() for item in data]

+                 else:

+                     out[datatype] = data

+ 

+             # Add the list of packages built from the same src.rpm

+             if pkg.rpm_sourcerpm:

+                 async with db.execute(GET_CO_PACKAGE, (pkg.rpm_sourcerpm,)) as cursor:

+                     copkgs = await cursor.fetchall()

+                 out['co-packages'] = list(set([

+                     cpkg[2] for cpkg in copkgs

+                 ]))

              else:

-                 out[datatype] = data

+                 out['co-packages'] = []

+             out['repo'] = repotype if repotype else 'release'

+             output.append(out)

  

-         # New meta-data present for soft dependency management in RPM

-         for datatype in [

-                 'enhances', 'recommends', 'suggests', 'supplements']:

-             data = yield from mdapilib.get_package_info(

-                 session, pkg.pkgKey, datatype.capitalize())

-             if data:

-                 out[datatype] = [item.to_json() for item in data]

-             else:

-                 out[datatype] = data

- 

-         # Add the list of packages built from the same src.rpm

-         if pkg.rpm_sourcerpm:

-             copkgs = yield from mdapilib.get_co_packages(

-                 session, pkg.rpm_sourcerpm)

-             out['co-packages'] = list(set([

-                 cpkg.name for cpkg in copkgs

-             ]))

-         else:

-             out['co-packages'] = []

-         out['repo'] = repotype if repotype else 'release'

-         session.close()

-         output.append(out)

      if singleton:

          return output[0]

      else:

          return output

  

  

- @asyncio.coroutine

- @allows_jsonp

- def get_pkg(request):

-     _log.info('get_pkg %s', request)

-     branch = request.match_info.get('branch')

-     pretty = _get_pretty(request)

-     name = request.match_info.get('name')

-     pkg, repotype = yield from _get_pkg(branch, name)

- 

-     output = yield from _expand_pkg_info(pkg, branch, repotype)

- 

-     args = {}

-     if pretty:

-         args = dict(sort_keys=True, indent=4, separators=(',', ': '))

- 

-     output = web.Response(

-         body=json.dumps(output, **args).encode('utf-8'),

-         content_type='application/json')

-     return output

- 

- 

- @asyncio.coroutine

- @allows_jsonp

- def get_src_pkg(request):

-     _log.info('get_src_pkg %s', request)

-     branch = request.match_info.get('branch')

-     pretty = _get_pretty(request)

-     name = request.match_info.get('name')

-     pkg, repotype = yield from _get_pkg(branch, srcname=name)

- 

-     output = yield from _expand_pkg_info(pkg, branch, repotype)

- 

-     args = {}

-     if pretty:

-         args = dict(sort_keys=True, indent=4, separators=(',', ': '))

- 

-     return web.Response(

-         body=json.dumps(output, **args).encode('utf-8'),

-         content_type='application/json')

- 

- 

- @asyncio.coroutine

- @allows_jsonp

- def get_pkg_files(request):

-     _log.info('get_pkg_files %s', request)

-     branch = request.match_info.get('branch')

-     name = request.match_info.get('name')

-     pretty = _get_pretty(request)

-     pkg, repotype = yield from _get_pkg(branch, name)

- 

-     dbfile = '%s/mdapi-%s%s-filelists.sqlite' % (

-         CONFIG['DB_FOLDER'], branch, '-%s' % repotype if repotype else '')

+ async def _get_files(pkg_id, branch, repotype):

+     ''' Return the files list for the given package in the specified

+     branch.

+     '''

+     dbfile = f'{CONFIG["DB_FOLDER"]}/mdapi-{branch}{"-"+repotype if repotype else ""}'\

+              '-filelists.sqlite'

      if not os.path.exists(dbfile):

          raise web.HTTPBadRequest()

  

-     session2 = yield from mdapilib.create_session(

-         'sqlite:///%s' % dbfile)

-     filelist = yield from mdapilib.get_files(session2, pkg.pkgId)

-     session2.close()

+     async with aiosqlite.connect(f"{dbfile}") as db:

+         async with db.execute(GET_FILES, (pkg_id,)) as cursor:

+             filelists = await cursor.fetchall()

+ 

+     filelists = [FileList(*item) for item in filelists]

  

      output = {

-         'files': [fileinfo.to_json() for fileinfo in filelist],

+         'files': [fileinfo.to_json() for fileinfo in filelists],

          'repo': repotype if repotype else 'release',

      }

-     args = {}

-     if pretty:

-         args = dict(sort_keys=True, indent=4, separators=(',', ': '))

- 

-     return web.Response(

-         body=json.dumps(output, **args).encode('utf-8'),

-         content_type='application/json')

- 

+     return output

  

- @asyncio.coroutine

- @allows_jsonp

- def get_pkg_changelog(request):

-     _log.info('get_pkg_changelog %s', request)

-     branch = request.match_info.get('branch')

-     name = request.match_info.get('name')

-     pretty = _get_pretty(request)

-     pkg, repotype = yield from _get_pkg(branch, name)

  

-     dbfile = '%s/mdapi-%s%s-other.sqlite' % (

-         CONFIG['DB_FOLDER'], branch, '-%s' % repotype if repotype else '')

+ async def _get_changelog(pkg_id, branch, repotype):

+     ''' Return the changelog for the given package in the specified

+     branch.

+     '''

+     dbfile = f'{CONFIG["DB_FOLDER"]}/mdapi-{branch}{"-"+repotype if repotype else ""}-other.sqlite'

      if not os.path.exists(dbfile):

          raise web.HTTPBadRequest()

  

-     session2 = yield from mdapilib.create_session(

-         'sqlite:///%s' % dbfile)

-     changelogs = yield from mdapilib.get_changelog(session2, pkg.pkgId)

-     session2.close()

+     async with aiosqlite.connect(f"{dbfile}") as db:

+         async with db.execute(GET_CHANGELOGS, (pkg_id,)) as cursor:

+             changelogs = await cursor.fetchall()

+ 

+     changelogs = [ChangeLog(*item) for item in changelogs]

  

      output = {

          'changelogs': [changelog.to_json() for changelog in changelogs],

          'repo': repotype if repotype else 'release',

      }

-     args = {}

-     if pretty:

-         args = dict(sort_keys=True, indent=4, separators=(',', ': '))

- 

-     return web.Response(

-         body=json.dumps(output, **args).encode('utf-8'),

-         content_type='application/json')

- 

- 

- @asyncio.coroutine

- def list_branches(request):

-     ''' Return the list of all branches currently supported by mdapi

-     '''

-     _log.info('list_branches: %s', request)

-     pretty = _get_pretty(request)

-     output = sorted(list(set([

-         # Remove the front part `mdapi-` and the end part -<type>.sqlite

-         filename.replace('mdapi-', '').rsplit('-', 2)[0].replace(

-             '-updates', '')

-         for filename in os.listdir(CONFIG['DB_FOLDER'])

-         if filename.startswith('mdapi') and filename.endswith('.sqlite')

-     ])))

- 

-     args = {}

-     if pretty:

-         args = dict(sort_keys=True, indent=4, separators=(',', ': '))

- 

-     response = web.Response(body=json.dumps(output, **args).encode('utf-8'),

-                             content_type='application/json')

- 

-     # The decorator doesn't work for this endpoint, so do it manually here

-     # I am not really sure what doesn't work but it seems this endpoint is

-     # returning an object instead of the expected generator despite it being

-     # flagged as an asyncio coroutine

-     url_arg = request.query

-     callback = url_arg.get('callback')

-     if callback and request.method == 'GET':

-         if isinstance(callback, list):

-             callback = callback[0]

-         response.mimetype = 'application/javascript'

-         response.content_type = 'application/javascript'

-         response.text = '%s(%s);' % (callback, response.text)

- 

-     return response

- 

- 

- @asyncio.coroutine

- @allows_jsonp

- def process_dep(request, action):

-     ''' Return the information about the packages having the specified

-     action (provides, requires, obsoletes...)

-     '''

-     _log.info('process_dep %s: %s', action, request)

-     branch = request.match_info.get('branch')

-     pretty = _get_pretty(request)

-     name = request.match_info.get('name')

- 

-     try:

-         pkg, repotype = yield from _get_pkg(branch, name, action=action)

-     except:

-         raise web.HTTPBadRequest()

- 

-     output = yield from _expand_pkg_info(pkg, branch, repotype)

- 

-     args = {}

-     if pretty:

-         args = dict(sort_keys=True, indent=4, separators=(',', ': '))

- 

-     return web.Response(body=json.dumps(output, **args).encode('utf-8'),

-                         content_type='application/json')

- 

- 

- @asyncio.coroutine

- def get_provides(request):

-     return process_dep(request, 'provides')

- 

- 

- @asyncio.coroutine

- def get_requires(request):

-     return process_dep(request, 'requires')

- 

- 

- @asyncio.coroutine

- def get_obsoletes(request):

-     return process_dep(request, 'obsoletes')

- 

- 

- @asyncio.coroutine

- def get_conflicts(request):

-     return process_dep(request, 'conflicts')

- 

- 

- @asyncio.coroutine

- def get_enhances(request):

-     return process_dep(request, 'enhances')

- 

- 

- @asyncio.coroutine

- def get_recommends(request):

-     return process_dep(request, 'recommends')

- 

- 

- @asyncio.coroutine

- def get_suggests(request):

-     return process_dep(request, 'suggests')

- 

- 

- @asyncio.coroutine

- def get_supplements(request):

-     return process_dep(request, 'supplements')

- 

- 

- @asyncio.coroutine

- def index(request):

-     _log.info('index %s', request)

-     return web.Response(

-         body=INDEX.encode('utf-8'),

-         content_type='text/html',

-         charset='utf-8')

- 

- 

- def _set_routes(app):

-     routes = []

-     prefix = CONFIG.get('PREFIX', '')

-     if prefix:

-         routes.append(('', index))

- 

-     routes.extend([

-         ('/', index),

-         ('/branches', list_branches),

-         ('/{branch}/pkg/{name}', get_pkg),

-         ('/{branch}/srcpkg/{name}', get_src_pkg),

- 

-         ('/{branch}/provides/{name}', get_provides),

-         ('/{branch}/requires/{name}', get_requires),

-         ('/{branch}/obsoletes/{name}', get_obsoletes),

-         ('/{branch}/conflicts/{name}', get_conflicts),

- 

-         ('/{branch}/enhances/{name}', get_enhances),

-         ('/{branch}/recommends/{name}', get_recommends),

-         ('/{branch}/suggests/{name}', get_suggests),

-         ('/{branch}/supplements/{name}', get_supplements),

- 

-         ('/{branch}/files/{name}', get_pkg_files),

-         ('/{branch}/changelog/{name}', get_pkg_changelog),

-     ])

-     for route in routes:

-         app.router.add_route('GET', prefix + route[0], route[1])

-     return app

+     return output

file removed
-58

@@ -1,58 +0,0 @@ 

- # -*- coding: utf-8 -*-

- #

- # Copyright © 2015  Red Hat, Inc.

- #

- # This copyrighted material is made available to anyone wishing to use,

- # modify, copy, or redistribute it subject to the terms and conditions

- # of the GNU General Public License v.2, or (at your option) any later

- # version.  This program is distributed in the hope that it will be

- # useful, but WITHOUT ANY WARRANTY expressed or implied, including the

- # implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR

- # PURPOSE.  See the GNU General Public License for more details.  You

- # should have received a copy of the GNU General Public License along

- # with this program; if not, write to the Free Software Foundation,

- # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

- #

- # Any Red Hat trademarks that are incorporated in the source

- # code or documentation are not subject to the GNU General Public

- # License and may only be used or replicated with the express permission

- # of Red Hat, Inc.

- #

- 

- '''

- DB mapping for the other sqlite DB.

- '''

- 

- import sqlalchemy as sa

- 

- from sqlalchemy.ext.declarative import declarative_base

- 

- BASE = declarative_base()

- 

- 

- class Package(BASE):

-     ''' Maps the packages table in the primary.sqlite database from

-     repodata to a python object.

-     '''

-     __tablename__ = 'packages'

-     pkgKey = sa.Column(sa.Integer, primary_key=True)

-     pkgId = sa.Column(sa.Text)

- 

- 

- class Changelog(BASE):

-     ''' Maps the packages table in the filelists.sqlite database from

-     repodata to a python object.

-     '''

-     __tablename__ = 'changelog'

-     pkgKey = sa.Column(sa.Integer, primary_key=True)

-     author = sa.Column(sa.Text, primary_key=True)

-     changelog = sa.Column(sa.Text, primary_key=True)

-     date = sa.Column(sa.Integer, primary_key=True)

- 

-     def to_json(self):

-         filelist = {

-             'author': self.author,

-             'changelog': self.changelog,

-             'date': self.date,

-         }

-         return filelist

file added
+196

@@ -0,0 +1,196 @@ 

+ # -*- coding: utf-8 -*-

+ #

+ # Copyright © 2019  Red Hat, Inc.

+ #

+ # This copyrighted material is made available to anyone wishing to use,

+ # modify, copy, or redistribute it subject to the terms and conditions

+ # of the GNU General Public License v.2, or (at your option) any later

+ # version.  This program is distributed in the hope that it will be

+ # useful, but WITHOUT ANY WARRANTY expressed or implied, including the

+ # implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR

+ # PURPOSE.  See the GNU General Public License for more details.  You

+ # should have received a copy of the GNU General Public License along

+ # with this program; if not, write to the Free Software Foundation,

+ # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

+ #

+ # Any Red Hat trademarks that are incorporated in the source

+ # code or documentation are not subject to the GNU General Public

+ # License and may only be used or replicated with the express permission

+ # of Red Hat, Inc.

+ #

+ 

+ from dataclasses import dataclass

+ 

+ GET_PACKAGE = """SELECT pkgKey,

+                         pkgId,

+                         name,

+                         rpm_sourcerpm,

+                         epoch,

+                         version,

+                         release,

+                         arch,

+                         summary,

+                         description,

+                         url

+                  FROM packages

+                  WHERE name = ?

+                  ORDER BY epoch DESC, version DESC, release DESC"""

+ 

+ GET_PACKAGE_INFO = """SELECT rowid,

+                              pkgKey,

+                              name,

+                              epoch,

+                              version,

+                              release,

+                              flags

+                       FROM {}

+                       WHERE pkgKey = ?"""

+ 

+ GET_CO_PACKAGE = """SELECT pkgKey,

+                            pkgId,

+                            name,

+                            rpm_sourcerpm,

+                            epoch,

+                            version,

+                            release,

+                            arch,

+                            summary,

+                            description,

+                            url

+                     FROM packages

+                     WHERE rpm_sourcerpm = ?"""

+ 

+ GET_PACKAGE_BY_SRC = """SELECT pkgKey,

+                                pkgId,

+                                name,

+                                rpm_sourcerpm,

+                                epoch,

+                                version,

+                                release,

+                                arch,

+                                summary,

+                                description,

+                                url

+                         FROM packages

+                         WHERE rpm_sourcerpm LIKE ?

+                         ORDER BY epoch DESC, version DESC, release DESC"""

+ 

+ GET_PACKAGE_BY = """SELECT p.pkgKey,

+                            p.pkgId,

+                            p.name,

+                            p.rpm_sourcerpm,

+                            p.epoch,

+                            p.version,

+                            p.release,

+                            p.arch,

+                            p.summary,

+                            p.description,

+                            p.url

+                     FROM packages p

+                     JOIN {} t ON t.pkgKey = p.pkgKey

+                     WHERE t.name = ?

+                     ORDER BY p.epoch DESC, p.version DESC, p.release DESC"""

+ 

+ GET_FILES = """SELECT f.pkgKey,

+                       f.dirname,

+                       f.filenames,

+                       f.filetypes

+                FROM filelist f

+                JOIN packages p ON p.pkgId = ?

+                WHERE f.pkgKey = p.pkgKey

+                ORDER BY f.filenames"""

+ 

+ 

+ GET_CHANGELOGS = """SELECT c.pkgKey,

+                            c.author,

+                            c.changelog,

+                            c.date

+                     FROM changelog c

+                     JOIN packages p ON p.pkgId = ?

+                     WHERE c.pkgKey = p.pkgKey

+                     ORDER BY c.date DESC"""

+ 

+ 

+ @dataclass

+ class Packages:

+     pkgKey: int

+     pkgId: str

+     name: str

+     rpm_sourcerpm: str

+     epoch: str

+     version: str

+     release: str

+     arch: str

+     summary: str

+     description: str

+     url: str

+ 

+     @property

+     def basename(self):

+         return self.rpm_sourcerpm.rsplit("-", 2)[0]

+ 

+     def to_json(self):

+         pkg = {

+             'arch': self.arch,

+             'epoch': self.epoch,

+             'version': self.version,

+             'release': self.release,

+             'summary': self.summary,

+             'description': self.description,

+             'basename': self.basename,

+             'url': self.url,

+         }

+         return pkg

+ 

+ 

+ @dataclass

+ class Dependencies:

+     rowid: int

+     pkgKey: int

+     name: str

+     epoch: str

+     version: str

+     release: str

+     flags: str

+ 

+     def to_json(self):

+         pkg = {

+             'name': self.name,

+             'epoch': self.epoch,

+             'version': self.version,

+             'release': self.release,

+             'flags': self.flags,

+         }

+         return pkg

+ 

+ 

+ @dataclass

+ class FileList:

+     pkgKey: int

+     dirname: str

+     filenames: str

+     filetypes: str

+ 

+     def to_json(self):

+         filelist = {

+             'dirname': self.dirname,

+             'filenames': self.filenames,

+             'filetypes': self.filetypes,

+         }

+         return filelist

+ 

+ 

+ @dataclass

+ class ChangeLog:

+     pkgKey: int

+     author: str

+     changelog: str

+     date: int

+ 

+     def to_json(self):

+         changelog = {

+                 'author': self.author,

+                 'changelog': self.changelog,

+                 'date': self.date,

+         }

+         return changelog

file removed
-58

@@ -1,58 +0,0 @@ 

- # -*- coding: utf-8 -*-

- #

- # Copyright © 2015  Red Hat, Inc.

- #

- # This copyrighted material is made available to anyone wishing to use,

- # modify, copy, or redistribute it subject to the terms and conditions

- # of the GNU General Public License v.2, or (at your option) any later

- # version.  This program is distributed in the hope that it will be

- # useful, but WITHOUT ANY WARRANTY expressed or implied, including the

- # implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR

- # PURPOSE.  See the GNU General Public License for more details.  You

- # should have received a copy of the GNU General Public License along

- # with this program; if not, write to the Free Software Foundation,

- # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

- #

- # Any Red Hat trademarks that are incorporated in the source

- # code or documentation are not subject to the GNU General Public

- # License and may only be used or replicated with the express permission

- # of Red Hat, Inc.

- #

- 

- '''

- DB mapping for the filelists sqlite DB.

- '''

- 

- import sqlalchemy as sa

- 

- from sqlalchemy.ext.declarative import declarative_base

- 

- BASE = declarative_base()

- 

- 

- class Package(BASE):

-     ''' Maps the packages table in the primary.sqlite database from

-     repodata to a python object.

-     '''

-     __tablename__ = 'packages'

-     pkgKey = sa.Column(sa.Integer, primary_key=True)

-     pkgId = sa.Column(sa.Text)

- 

- 

- class Filelist(BASE):

-     ''' Maps the packages table in the filelists.sqlite database from

-     repodata to a python object.

-     '''

-     __tablename__ = 'filelist'

-     pkgKey = sa.Column(sa.Integer, primary_key=True)

-     dirname = sa.Column(sa.Text, primary_key=True)

-     filenames = sa.Column(sa.Text, primary_key=True)

-     filetypes = sa.Column(sa.Text, primary_key=True)

- 

-     def to_json(self):

-         filelist = {

-             'dirname': self.dirname,

-             'filenames': self.filenames,

-             'filetypes': self.filetypes,

-         }

-         return filelist

file modified
+13 -13

@@ -68,7 +68,7 @@ 

  

      /branches

  

-     <a href="$PREFIX/branches">/branches</a>

+     <a href="/branches">/branches</a>

  

  

  Note:

@@ -88,7 +88,7 @@ 

  

  So for example, for the kernel in rawhide:

  

-     <a href="$PREFIX/rawhide/pkg/kernel">/rawhide/pkg/kernel</a>

+     <a href="/rawhide/pkg/kernel">/rawhide/pkg/kernel</a>

  

  

  You can also retrieve information about a specific package on a specific

@@ -98,7 +98,7 @@ 

  

  So for example, for the python-natsort in rawhide that only exists as src.rpm:

  

-     <a href="$PREFIX/rawhide/srcpkg/python-natsort">/rawhide/srcpkg/python-natsort</a>

+     <a href="/rawhide/srcpkg/python-natsort">/rawhide/srcpkg/python-natsort</a>

  

  

  Retrieve the list of files in a package

@@ -111,7 +111,7 @@ 

  

  So for example, for the kernel-core in rawhide:

  

-     <a href="$PREFIX/rawhide/files/kernel-core">/rawhide/files/kernel-core</a>

+     <a href="/rawhide/files/kernel-core">/rawhide/files/kernel-core</a>

  

  

  Retrieve the changelog of a package

@@ -124,7 +124,7 @@ 

  

  So for example, for the kernel in rawhide:

  

-     <a href="$PREFIX/rawhide/changelog/kernel">/rawhide/changelog/kernel</a>

+     <a href="/rawhide/changelog/kernel">/rawhide/changelog/kernel</a>

  

  

  Retrieve the packages having a specific property

@@ -143,29 +143,29 @@ 

  Few examples:

  

      packages requiring R in rawhide:

-     <a href="$PREFIX/rawhide/requires/R">/rawhide/requires/R</a>

+     <a href="/rawhide/requires/R">/rawhide/requires/R</a>

      To see what R itself requires, check its information using: <a href="/rawhide/pkg/R">/rawhide/pkg/R</a>

  

      packages providing perl(SetupLog) in rawhide:

-     <a href="$PREFIX/rawhide/provides/perl(SetupLog)">/rawhide/provides/perl(SetupLog)</a>

+     <a href="/rawhide/provides/perl(SetupLog)">/rawhide/provides/perl(SetupLog)</a>

  

      packages obsoleting cabal2spec in rawhide:

-     <a href="$PREFIX/rawhide/obsoletes/cabal2spec">rawhide/obsoletes/cabal2spec</a>

+     <a href="/rawhide/obsoletes/cabal2spec">rawhide/obsoletes/cabal2spec</a>

  

      packages conflicting with mariadb in rawhide:

-     <a href="$PREFIX/rawhide/conflicts/mariadb">rawhide/conflicts/mariadb</a>

+     <a href="/rawhide/conflicts/mariadb">rawhide/conflicts/mariadb</a>

  

      packages enhancing httpd in rawhide:

-     <a href="$PREFIX/rawhide/enhances/httpd">rawhide/enhances/httpd</a>

+     <a href="/rawhide/enhances/httpd">rawhide/enhances/httpd</a>

  

      packages recommending flac in rawhide:

-     <a href="$PREFIX/rawhide/recommends/flac">rawhide/recommends/flac</a>

+     <a href="/rawhide/recommends/flac">rawhide/recommends/flac</a>

  

      packages suggesting R-tools in rawhide:

-     <a href="$PREFIX/rawhide/suggests/R-tools">rawhide/suggests/R-tools</a>

+     <a href="/rawhide/suggests/R-tools">rawhide/suggests/R-tools</a>

  

      packages supplementing `(hunspell and langpacks-fr)` in rawhide:

-     <a href="$PREFIX/rawhide/supplements/(hunspell and langpacks-fr)">rawhide/supplements/(hunspell and langpacks-fr)</a>

+     <a href="/rawhide/supplements/(hunspell and langpacks-fr)">rawhide/supplements/(hunspell and langpacks-fr)</a>

  

  

  

file removed
-276

@@ -1,276 +0,0 @@ 

- # -*- coding: utf-8 -*-

- #

- # Copyright © 2015  Red Hat, Inc.

- #

- # This copyrighted material is made available to anyone wishing to use,

- # modify, copy, or redistribute it subject to the terms and conditions

- # of the GNU General Public License v.2, or (at your option) any later

- # version.  This program is distributed in the hope that it will be

- # useful, but WITHOUT ANY WARRANTY expressed or implied, including the

- # implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR

- # PURPOSE.  See the GNU General Public License for more details.  You

- # should have received a copy of the GNU General Public License along

- # with this program; if not, write to the Free Software Foundation,

- # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

- #

- # Any Red Hat trademarks that are incorporated in the source

- # code or documentation are not subject to the GNU General Public

- # License and may only be used or replicated with the express permission

- # of Red Hat, Inc.

- #

- 

- '''

- MDAPI internal API to interact with the database.

- '''

- 

- import contextlib

- import time

- 

- import asyncio

- import sqlalchemy as sa

- 

- from sqlalchemy.orm import sessionmaker

- from sqlalchemy.orm import scoped_session

- from sqlalchemy.exc import SQLAlchemyError, OperationalError

- 

- import mdapi.changelog as changelog

- import mdapi.filelist as filelist

- import mdapi.primary as primary

- 

- 

- RETRY_ATTEMPT = 3

- 

- 

- @asyncio.coroutine

- def create_session(db_url, debug=False, pool_recycle=3600):

-     """ Create the Session object to use to query the database.

- 

-     :arg db_url: URL used to connect to the database. The URL contains

-     information with regards to the database engine, the host to connect

-     to, the user and password and the database name.

-       ie: <engine>://<user>:<password>@<host>/<dbname>

-     :kwarg debug: a boolean specifying wether we should have the verbose

-         output of sqlalchemy or not.

-     :return a Session that can be used to query the database.

- 

-     """

-     engine = sa.create_engine(

-         db_url, echo=debug, pool_recycle=pool_recycle)

-     scopedsession = scoped_session(sessionmaker(bind=engine))

-     return scopedsession

- 

- 

- @contextlib.contextmanager

- def session_manager(db_url, debug=False, pool_recycle=3600):

-     """ A handy context manager for our sessions. """

-     session = yield from create_session(

-         db_url, debug=debug, pool_recycle=pool_recycle)

-     try:

-         yield session

-         session.commit()

-     except:

-         session.rollback()

-         raise

-     finally:

-         session.close()

- 

- 

- @asyncio.coroutine

- def get_package(session, pkg_name):

-     ''' Return information about a package, if we can find it.

-     '''

-     output = None

-     cnt = 0

-     try:

-         pkg = session.query(

-             primary.Package

-         ).filter(

-             primary.Package.name == pkg_name

-         ).order_by(

-             primary.Package.epoch.desc(),

-             primary.Package.version.desc(),

-             primary.Package.release.desc(),

-         )

-         output = pkg.first()

-     except SQLAlchemyError as err:

-         cnt += 1

-         if cnt > RETRY_ATTEMPT:

-             raise

-         else:

-             time.sleep(0.1)

-             output = yield from get_package(session, pkg_name)

- 

-     return output

- 

- 

- @asyncio.coroutine

- def get_package_by_src(session, pkg_name):

-     ''' Return information about a package, if we can find it.

-     '''

-     # First try if there is a package matching exactly the provided name

-     simple_match = yield from get_package(session, pkg_name)

-     if simple_match and simple_match.basename == pkg_name:

-         return simple_match

- 

-     # If there is not a direct match, look by the sourcerpm name

-     cnt = 0

-     try:

-         pkg = session.query(

-             primary.Package

-         ).filter(

-             primary.Package.rpm_sourcerpm.like('{}%'.format(pkg_name))

-         ).order_by(

-             primary.Package.epoch.desc(),

-             primary.Package.version.desc(),

-             primary.Package.release.desc(),

-         )

-         for pkg in pkg.all():

-             if pkg.basename == pkg_name:

-                 return pkg

-     except SQLAlchemyError as err:

-         cnt += 1

-         if cnt > RETRY_ATTEMPT:

-             raise

-         else:

-             time.sleep(0.1)

-             yield from get_package_by_src(session, pkg_name)

- 

- 

- @asyncio.coroutine

- def get_package_by(session, tablename, key, cnt=None):

-     ''' Return information the package providing the provides, if we can find it.

-     '''

-     table = getattr(primary, tablename.capitalize())

- 

-     output = None

-     cnt = cnt or 0

-     try:

-         pkg = session.query(

-             primary.Package

-         ).filter(

-             table.name == key

-         ).filter(

-             table.pkgKey == primary.Package.pkgKey

-         ).order_by(

-             primary.Package.epoch.desc(),

-             primary.Package.version.desc(),

-             primary.Package.release.desc(),

-         )

-         output = pkg.all()

-     except SQLAlchemyError as err:

-         cnt += 1

-         if cnt > RETRY_ATTEMPT:

-             raise

-         else:

-             time.sleep(0.1)

-             output = yield from get_package_by(

-                 session, tablename, key, cnt=cnt)

- 

-     return output

- 

- 

- @asyncio.coroutine

- def get_package_info(session, pkgKey, tablename):

-     ''' Return the information contained in the specified table for the

-     given package.

-     '''

-     table = getattr(primary, tablename)

-     output = None

-     cnt = 0

-     try:

-         query = session.query(

-             table

-         ).filter(

-             table.pkgKey == pkgKey

-         )

-         output = query.all()

-     except OperationalError:

-         return None

-     except SQLAlchemyError as err:

-         cnt += 1

-         if cnt > RETRY_ATTEMPT:

-             raise

-         else:

-             time.sleep(0.1)

-             output = yield from get_package_info(session, pkgKey, tablename)

- 

-     return output

- 

- 

- @asyncio.coroutine

- def get_co_packages(session, srcpkg_name):

-     ''' Return the name of all the packages coming from the same

-     source-package.

-     '''

-     output = None

-     cnt = 0

-     try:

-         pkg = session.query(

-             primary.Package

-         ).filter(

-             primary.Package.rpm_sourcerpm == srcpkg_name

-         )

-         output = pkg.all()

-     except SQLAlchemyError as err:

-         cnt += 1

-         if cnt > RETRY_ATTEMPT:

-             raise

-         else:

-             time.sleep(0.1)

-             output = yield from get_co_packages(session, srcpkg_name)

- 

-     return output

- 

- 

- @asyncio.coroutine

- def get_files(session, pkg_id):

-     ''' Return the list of all the files in a package given its key.

-     '''

-     output = None

-     cnt = 0

-     try:

-         pkg = session.query(

-             filelist.Filelist

-         ).filter(

-             filelist.Package.pkgId == pkg_id,

-             filelist.Filelist.pkgKey == filelist.Package.pkgKey

-         ).order_by(

-             filelist.Filelist.filenames

-         )

-         output = pkg.all()

-     except SQLAlchemyError as err:

-         cnt += 1

-         if cnt > RETRY_ATTEMPT:

-             raise

-         else:

-             time.sleep(0.1)

-             output = yield from get_files(session, pkg_id)

- 

-     return output

- 

- 

- @asyncio.coroutine

- def get_changelog(session, pkg_id):

-     ''' Return the list of all the changelog in a package given its key.

-     '''

-     output = None

-     cnt = 0

-     try:

-         pkg = session.query(

-             changelog.Changelog

-         ).filter(

-             changelog.Package.pkgId == pkg_id,

-             changelog.Changelog.pkgKey == changelog.Package.pkgKey

-         ).order_by(

-             changelog.Changelog.date.desc()

-         )

-         output = pkg.all()

-     except SQLAlchemyError as err:

-         cnt += 1

-         if cnt > RETRY_ATTEMPT:

-             raise

-         else:

-             time.sleep(0.1)

-             output = yield from get_changelog(session, pkg_id)

- 

-     return output

file removed
-151

@@ -1,151 +0,0 @@ 

- # -*- coding: utf-8 -*-

- #

- # Copyright © 2015  Red Hat, Inc.

- #

- # This copyrighted material is made available to anyone wishing to use,

- # modify, copy, or redistribute it subject to the terms and conditions

- # of the GNU General Public License v.2, or (at your option) any later

- # version.  This program is distributed in the hope that it will be

- # useful, but WITHOUT ANY WARRANTY expressed or implied, including the

- # implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR

- # PURPOSE.  See the GNU General Public License for more details.  You

- # should have received a copy of the GNU General Public License along

- # with this program; if not, write to the Free Software Foundation,

- # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

- #

- # Any Red Hat trademarks that are incorporated in the source

- # code or documentation are not subject to the GNU General Public

- # License and may only be used or replicated with the express permission

- # of Red Hat, Inc.

- #

- 

- '''

- DB mapping for the primary sqlite DB.

- '''

- 

- import sqlalchemy as sa

- 

- from sqlalchemy.ext.declarative import declarative_base

- 

- BASE = declarative_base()

- 

- 

- class Package(BASE):

-     ''' Maps the packages table in the primary.sqlite database from

-     repodata to a python object.

-     '''

-     __tablename__ = 'packages'

-     pkgKey = sa.Column(sa.Integer, primary_key=True)

-     pkgId = sa.Column(sa.Text)

-     name = sa.Column(sa.Text)

-     rpm_sourcerpm = sa.Column(sa.Text)

-     epoch = sa.Column(sa.Text)

-     version = sa.Column(sa.Text)

-     release = sa.Column(sa.Text)

-     arch = sa.Column(sa.Text)

-     summary = sa.Column(sa.Text)

-     description = sa.Column(sa.Text)

-     url = sa.Column(sa.Text)

- 

-     @property

-     def basename(self):

-         ''' Return the base package name using the rpm_sourcerpms info. '''

-         return self.rpm_sourcerpm.rsplit('-', 2)[0]

- 

-     def to_json(self):

-         pkg = {

-             'arch': self.arch,

-             'epoch': self.epoch,

-             'version': self.version,

-             'release': self.release,

-             'summary': self.summary,

-             'description': self.description,

-             'basename': self.basename,

-             'url': self.url,

-         }

-         return pkg

- 

- 

- class BaseDependency(object):

-     ''' Base mapping for the tables in the primary.sqlite database that

-     contain all the dependencies information

-     '''

-     rowid = sa.Column(sa.Integer, primary_key=True)

-     pkgKey = sa.Column(sa.Integer, index=True)

-     name = sa.Column(sa.Text)

-     epoch = sa.Column(sa.Text)

-     version = sa.Column(sa.Text)

-     release = sa.Column(sa.Text)

-     flags = sa.Column(sa.Text)

- 

-     def to_json(self):

-         pkg = {

-             'name': self.name,

-             'epoch': self.epoch,

-             'version': self.version,

-             'release': self.release,

-             'flags': self.flags,

-         }

-         return pkg

- 

- 

- BASEDEP = declarative_base(cls=BaseDependency)

- 

- 

- class Requires(BASEDEP):

-     ''' Maps the requires table in the primary.sqlite database from

-     repodata to a python object.

-     '''

-     __tablename__ = 'requires'