#348 Fix the feed & stream widgets backend
Merged 6 years ago by abompard. Opened 6 years ago by abompard.
abompard/fedora-hubs fix-feed  into  develop

file modified
+1
@@ -8,6 +8,7 @@ 

  

   # Forward traffic on the host to the development server on the guest

   config.vm.network "forwarded_port", guest: 5000, host: 5000

+  config.vm.network "forwarded_port", guest: 8080, host: 8080

  

   # Comment out if you don't want Vagrant to add and remove entries from /etc/hosts for each VM.

   # requires the vagrant-hostmanager plugin to be installed

@@ -8,5 +8,7 @@ 

  hubs_var_dir: "{{ hubs_base_dir }}/var"

  hubs_db_type: sqlite

  hubs_db_password: changeme

- hubs_url_hostname: localhost

+ hubs_url_hostname: "{{ ansible_fqdn }}"

  hubs_url: http{% if not hubs_dev_mode %}s{% endif %}://{{ hubs_url_hostname }}{% if hubs_dev_mode %}:5000{% endif %}

+ hubs_ssl_cert: /etc/pki/tls/certs/{{ hubs_url_hostname }}.crt

+ hubs_ssl_key: /etc/pki/tls/private/{{ hubs_url_hostname }}.key

@@ -0,0 +1,9 @@ 

+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

+ proxy_set_header X-Forwarded-Proto $scheme;

+ proxy_set_header Host $http_host;

+ # we don't want nginx trying to do something clever with

+ # redirects, we set the Host: header above already.

+ proxy_redirect off;

+ # OpenID Connect uses large headers, we need bigger buffers.

+ proxy_buffer_size 128k;

+ proxy_buffers 8 256k;

@@ -13,5 +13,9 @@ 

    service: name=hubs-worker@* state=restarted

    listen: "hubs configuration change"

  

+ - name: restart hubs SSE server

+   service: name=hubs-sse state=restarted

+   listen: "hubs configuration change"

+ 

  # Webserver

  - include: webserver.yml

@@ -49,6 +49,8 @@ 

    command: "{{ hubs_venv_dir }}/bin/python {{ hubs_code_dir }}/populate.py"

    args:

      chdir: "{{ hubs_code_dir }}"

+   environment:

+     HUBS_CONFIG: "{{ hubs_conf_dir }}/hubs_config.py"

    become_user: "{{ main_user }}"

    when: db_creation|succeeded

  

@@ -1,6 +1,8 @@ 

  - name: Create and populate the Fedora Hubs database

    command: "{{ hubs_venv_dir }}/bin/python {{ hubs_code_dir }}/populate.py"

-   become_user: "{{ main_user }}"

    args:

      creates: "{{ hubs_var_dir }}/hubs.db"

      chdir: "{{ hubs_code_dir }}"

+   environment:

+     HUBS_CONFIG: "{{ hubs_conf_dir }}/hubs_config.py"

+   become_user: "{{ main_user }}"

@@ -108,6 +108,7 @@ 

      creates: "{{ hubs_venv_dir }}/lib/python2.7/site-packages/fedora-hubs.egg-link"

  

  - name: Set bin file context in the virtualenv

+   become_user: "{{ main_user }}"

    file:

      path: "{{ hubs_venv_dir }}/bin"

      state: directory
@@ -167,7 +168,6 @@ 

  - name: Disable the system-wide fedmsg-hub

    service: name=fedmsg-hub state=stopped enabled=no

  

- 

  - name: Install the service files

    template:

      src: "{{ item }}.service"
@@ -175,7 +175,13 @@ 

    with_items:

      - hubs-triage@

      - hubs-worker@

+     - hubs-sse

      - hubs-fedmsg-hub

+   register: service_installed

+ 

+ - name: reload systemd

+   command: systemctl daemon-reload

+   when: service_installed|changed

  

  - name: Start and enable the services

    service: name={{ item }} state=started enabled=yes
@@ -185,6 +191,7 @@ 

      - hubs-triage@2

      - hubs-worker@1

      - hubs-worker@2

+     - hubs-sse

      - hubs-fedmsg-hub

  

  

@@ -5,7 +5,7 @@ 

    with_items:

      - python-gunicorn

      - nginx

-     - make

+     - libsemanage-python

  

  

  - name: Generate SSL certificate and key
@@ -29,7 +29,7 @@ 

      - restart hubs webapp

  

  

- - name: Nginx configuration

+ - name: Nginx configuration for hubs

    template:

      src: nginx.conf

      dest: /etc/nginx/conf.d/hubs.conf
@@ -47,6 +47,18 @@ 

    notify:

      - restart nginx

  

+ 

+ - name: Nginx proxy configuration

+   copy:

+     src: "{{ item }}"

+     dest: /etc/nginx/proxy_params

+   with_first_found:

+     - nginx_proxy_params.{{ ansible_hostname }}

+     - nginx_proxy_params

+   notify:

+     - restart nginx

+ 

+ 

  - name: Allow network connection for Nginx

    seboolean:

      name: httpd_can_network_connect
@@ -60,6 +72,12 @@ 

      dest: /etc/systemd/system/{{ item }}.service

    with_items:

      - hubs-webapp

+   register: service_installed

+ 

+ 

+ - name: reload systemd

+   command: systemctl daemon-reload

+   when: service_installed|changed

  

  

  - name: Start and enable the services

@@ -0,0 +1,18 @@ 

+ [Unit]

+ Description=fedora-hubs SSE server

+ After=network.target

+ Documentation=https://pagure.io/fedora-hubs/

+ 

+ [Service]

+ ExecStart= \

+     {{ hubs_venv_dir }}/bin/python \

+     /usr/bin/twistd -l - --pidfile= \

+     -ny {{ hubs_code_dir }}/hubs/backend/sse_server.tac

+ Environment=HUBS_CONFIG={{ hubs_conf_dir }}/hubs_config.py

+ Type=simple

+ User={{ main_user }}

+ Group={{ main_user }}

+ Restart=on-failure

+ 

+ [Install]

+ WantedBy=multi-user.target

@@ -9,3 +9,12 @@ 

  

  OIDC_CLIENT_SECRETS = "{{ hubs_conf_dir }}/client_secrets.json"

  OIDC_OPENID_REALM = "{{ hubs_url }}/oidc_callback"

+ 

+ SSE_URL = {

+     # "host": "sse.example.com",

+     {% if hubs_dev_mode %}

+     "port": "8080",

+     {% else %}

+     "path": "/sse",

+     {% endif %}

+ }

@@ -9,6 +9,12 @@ 

      server 127.0.0.1:8000 fail_timeout=0;

  }

  

+ upstream hubs-sse {

+     # SSE server (twisted-based)

+     server 127.0.0.1:8080 fail_timeout=0;

+ }

+ 

+ 

  # Redirect cleartext traffic to HTTPS

  server {

      listen 80;
@@ -36,16 +42,21 @@ 

        try_files $uri @proxy_to_app;

      }

  

-     location @proxy_to_app {

-       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

-       # enable this if and only if you use HTTPS

-       proxy_set_header X-Forwarded-Proto https;

-       proxy_set_header Host $http_host;

-       # we don't want nginx trying to do something clever with

-       # redirects, we set the Host: header above already.

-       proxy_redirect off;

+     location /sse/ {

+       include proxy_params;

+       proxy_pass http://hubs-sse/;

        # Allow long-running queries (SSE):

        proxy_buffering off;

+       proxy_http_version 1.1;

+       proxy_set_header Connection "";

+       proxy_cache off;

+       chunked_transfer_encoding off;

+       keepalive_timeout 0;

+       proxy_read_timeout 30m;

+     }

+ 

+     location @proxy_to_app {

+       include proxy_params;

        proxy_pass http://hubs;

      }

  

@@ -1,3 +1,3 @@ 

  ssl                  on;

- ssl_certificate      /etc/pki/tls/certs/localhost.crt;

- ssl_certificate_key  /etc/pki/tls/private/localhost.key;

+ ssl_certificate      {{ hubs_ssl_cert }};

+ ssl_certificate_key  {{ hubs_ssl_key }};

@@ -13,6 +13,7 @@ 

  

    roles:

      - role: hubs

+       hubs_url_hostname: localhost

        hubs_secret_key: vagrantmachinesecretkey

        hubs_db_type: sqlite

        hubs_dev_mode: true

file modified
-1
@@ -19,7 +19,6 @@ 

  

  import hubs.app

  import hubs.widgets

- import hubs.widgets.caching

  import hubs.models

  

  

file modified
+1 -1
@@ -6,6 +6,6 @@ 

  .. toctree::

     :maxdepth: 2

  

-    views

     widgets

+    utils

     auth

file added
+21
@@ -0,0 +1,21 @@ 

+ Utilities

+ =========

+ 

+ View utils

+ ----------

+ 

+ .. automodule:: hubs.utils.views

+    :members:

+ 

+ Caching utils

+ -------------

+ 

+ .. automodule:: hubs.utils.cache

+    :members:

+ 

+ Packages database utils

+ -----------------------

+ 

+ .. automodule:: hubs.utils.pkgdb

+    :members:

+ 

file removed
-9
@@ -1,9 +0,0 @@ 

- Views

- =====

- 

- View utils

- ----------

- 

- .. automodule:: hubs.utils.views

-    :members:

- 

file modified
+24 -16
@@ -376,7 +376,7 @@ 

    memcached cache when a fedmsg message comes in and once in javascript to tell

  the client to reload and redraw a portion of itself when a fedmsg comes in

    over the websocket firehose.

- - Instead, let's give fedora-hubs its own *widget-specific* `EventSource

+ - Instead, let's give fedora-hubs its own *page-specific* `EventSource

    <https://developer.mozilla.org/en-US/docs/Web/API/EventSource>`_ server that

    we tie in to the server-side cache-invalidation backend code.  I.e., when a

    message comes into the backend, it runs all the cache invalidation checkers
@@ -384,12 +384,18 @@ 

    we can emit events over EventSource to tell only *those* widgets on any

    connected clients to redraw themselves.

  

- As an aside, it became clear to me when making the diagram in the next section

- that, if we use handlebars.js and get rid of the server-side template

- rendering, then 1) the data returned by AJAX requests at page load and 2) the

- data pushed by the EventSource server can be *the exact same data*.  It will

- simplify and streamline the responsibilities of the pieces if the backend is

- worried *only* about these per-widget JSON responses.

+ Since the number of concurrent connections to a page is limited by the browser,

+ we can't have one EventSource connection per widget, but we can have an

+ EventSource connection per page, which will dispatch the index of the widget to

+ redraw in its ``data`` field.

+ 

+ The EventSource message will only tell the browser to redraw the widget, it

+ will not carry the changed data itself.  Otherwise, we would have to check

+ authentication and authorizations on the SSE server itself, which means

+ connecting to the database, and this must be done using non-blocking APIs so we

+ can't reuse our models.  Just sending a change notice makes it much easier, all

+ this control code can stay in the webapp itself.

+ 

  

  A picture is worth...

  ---------------------
@@ -407,8 +413,9 @@ 

  

  - The user requests the page and the wsgi app responds with some barebones HTML

    and enough javascript to get off the ground.

- - The user's browser runs javascript that *subscribes* it to the EventSource server.

- - The user's browser runs that javascript, which requests data for each of the

+ - The user's browser runs javascript that *subscribes* it to the EventSource

+   server.

+ - The user's browser runs javascript that requests data for each of the

    widgets defined on the page.

  - The wsgi app receives those requests and checks to see if the data for any of

    them is *cached in memcached*.  If it is, then it is returned.  If not, then
@@ -416,7 +423,8 @@ 

    response ready.  It is stuffed in memcached for later access and returned.

  - The client renders widgets as the data for each of its requests comes back.

  

- Later, what happens when a *trac ticket* is filed that should show up in some widget on their page?

+ Later, what happens when a *trac ticket* is filed that should show up in some

+ widget on their page?

  

  - The ticket is updated on fedorahosted.org and a fedmsg message is fired.

  - That is received by the hubs backend, which looks up *all* the cached
@@ -424,11 +432,11 @@ 

    mizmo's page, threebean's page, and on the design hub that should all get

    fresh data because of this change).

  - All of those widgets get their cached data nuked.

- - All of those widgets get their cached data rebuilt by calling ``data(...)`` on them.

+ - All of those widgets get their cached data rebuilt by calling ``data(...)``

+   on them.

  - An EventSource event is fired off for any listening clients that *new data is

-   available for widgets X, Y, and Z*.  The data is included in the EventSource

-   payload so the clients can immediately redraw without bothering to re-query

-   the wsgi app.

+   available for widgets X, Y, and Z*.

+ - The clients re-query the wsgi app for those widgets' content and redraw them.

  

  What happens when the user is viewing the *design team* hub and

  simultaneously, an admin *changes the configuration of a widget on that page*?
@@ -444,5 +452,5 @@ 

  - ...looks up the cache key for *widget X with the new configuration* and

    builds the cached data by calling ``data(...)`` on the widget.

  - An EventSource event is fired off which gets received by everyone looking at

-   the *design team hub*.  The widget on their pages gets redrawn with data from

-   the EventSource event.

+   the *design team hub*.  The widget on their pages gets redrawn after

+   requesting the new data from the wsgi app.

file modified
+1 -1
@@ -9,7 +9,7 @@ 

                       |  |               |   +--------------+                                                        | js activity graphs

                       |  +---------------+                                                                           |

                       |                                                                                              V

-                      |                                                   push updated JSON                +-------------------------+

+                      |                                                   push redraw notice               +-------------------------+

                       |                                                   to clients asap                  | web client              |

                       |                                          +---------------------------------------->| (built from HTML and JS |

                       |                                          |                                         | served by the wsgi app) |

file modified
+24 -18
@@ -67,28 +67,34 @@ 

      flask.g.fedmsg_config = fedmsg_config

  

      if OIDC.user_loggedin:

-         if not hasattr(flask.session, 'auth') or not flask.session.auth:

-             flask.session.auth = munch.Munch(

-                 fullname=OIDC.user_getfield('name'),

-                 nickname=(OIDC.user_getfield('nickname') or

-                           OIDC.user_getfield('sub')),

-                 email=OIDC.user_getfield('email'),

-                 timezone=OIDC.user_getfield('zoneinfo'),

+         if "auth" not in flask.session or not flask.session["auth"]:

+             user_info = OIDC.user_getinfo([

+                 "name", "nickname", "sub", "email", "zoneinfo", "cla",

+                 "groups",

+                 ])

+             flask.session["auth"] = dict(

+                 fullname=user_info['name'],

+                 nickname=(user_info['nickname'] or user_info['sub']),

+                 email=user_info['email'],

+                 timezone=user_info['zoneinfo'],

                  cla_done=('http://admin.fedoraproject.org/accounts/cla/done'

-                           in OIDC.user_getfield('cla')),

-                 groups=OIDC.user_getfield('groups'),

+                           in user_info['cla']),

+                 groups=user_info['groups'],

                  logged_in=True,

              )

-             flask.session.auth.avatar = username2avatar(

-                 flask.session.auth.nickname)

- 

-             user = hubs.models.User.get_or_create(

-                 flask.g.db, username=flask.session.auth.nickname,

-                 fullname=flask.session.auth.fullname)

-             flask.session.auth.user = user

-         flask.g.auth = flask.session.auth

+             flask.session["auth"]["avatar"] = username2avatar(

+                 flask.session["auth"]["nickname"])

+             # Changes on mutable objects aren't picked up:

+             # http://flask.pocoo.org/docs/0.12/api/#flask.session.modified

+             flask.session.modified = True

+         flask.g.auth = munch.Munch(**flask.session["auth"])

+         user = hubs.models.User.get_or_create(

+             flask.g.db, username=flask.session["auth"]["nickname"],

+             fullname=flask.session["auth"]["fullname"])

+         flask.g.user = user

      else:

-         flask.g.auth = munch.Munch(logged_in=False, user=None)

+         flask.g.auth = munch.Munch(logged_in=False)

+         flask.g.user = None

  

  

  # Register widgets

@@ -0,0 +1,215 @@ 

+ #!/usr/bin/env python

+ # coding: utf-8

+ 

+ from __future__ import unicode_literals, print_function

+ 

+ import json

+ 

+ import txredisapi as redis

+ 

+ from twisted.application import service

+ from twisted.web import server, resource

+ from twisted.internet import reactor, defer, task

+ from twisted.logger import (

+     Logger, globalLogBeginner, ILogObserver, formatEvent)

+ from zope.interface import provider

+ 

+ import hubs.app

+ 

+ 

+ log = Logger()

+ fedmsg_config = hubs.app.fedmsg_config

+ 

+ REDIS_CONF = {

+     'host': fedmsg_config.get('hubs.redis.host', 'localhost'),

+     'port': fedmsg_config.get('hubs.redis.port', 6379),

+     'dbid': fedmsg_config.get('hubs.redis.db', 0),

+     'password': fedmsg_config.get('hubs.redis.password', None),

+ }

+ 

+ 

+ class RetaskQueue(object):

+     """

+     Compatibility class with https://github.com/kushaldas/retask/ using the

+     txredisapi non-blocking API for Redis.

+     """

+ 

+     def __init__(self, name, connection=None):

+         self.conn = connection or redis.lazyConnection(**REDIS_CONF)

+         self.name = "retaskqueue-{}".format(name)

+ 

+     def wait(self, wait_time=0):

+         def format_result(msg):

+             task = json.loads(msg[1])

+             return json.loads(task["_data"])

+         d = self.conn.brpop(self.name, wait_time)

+         d.addCallback(format_result)

+         d.addErrback(self._error_handler)

+         return d

+ 

+     def length(self):

+         d = self.conn.llen(self.name)

+         d.addErrback(self._error_handler)

+         return d

+ 

+     def _error_handler(self, failure):

+         failure.trap(redis.ConnectionError)

+         log.debug("Lost connection to Redis, will attempt to reconnect "

+                   "in the background.")

+         return None

+ 

+     def disconnect(self):

+         return self.conn.disconnect()

+ 

+ 

+ class Subscribe(resource.Resource):

+     """

+     Twisted web resource allowing clients to subscribe to Redis messages.

+ 

+     The Redis messages will be broadcasted to all clients using the broadcast()

+     method.

+     """

+ 

+     isLeaf = True

+ 

+     def __init__(self):

+         resource.Resource.__init__(self)

+         self.subscribers = {}

+         self.allow_origin = fedmsg_config.get('hubs.sse.allow-origin', '*')

+ 

+     def render_GET(self, request):

+         """

+         Handle GET requests to this endpoint. Requests should be to

+         ``/[user|hub]/<target>/``.

+         """

+         # If the request ended in a trailing / the final path section is ''.

+         # This allows the trailing / on /<type>/<target>/ to be optional.

+         if request.postpath[-1] == '':

+             request.postpath.pop()

+         if len(request.postpath) != 2:

+             response = resource.ErrorPage(

+                 status=400, brief="Bad request", detail="Bad request.")

+             return response.render(request)

+         target = "/".join([p.decode("utf-8") for p in request.postpath])

+ 

+         request.setHeader('Content-Type', 'text/event-stream; charset=utf-8')

+         request.setHeader('Access-Control-Allow-Origin', self.allow_origin)

+         request.setHeader('X-Accel-Buffering', 'no')

+         request.write(b"")

+         request.notifyFinish().addBoth(self.request_closed, request, target)

+ 

+         if target not in self.subscribers:

+             self.subscribers[target] = []  # first request to this target.

+         log.debug("New subscriber: {req}", req=request)

+         self.subscribers[target].append(request)

+         self._send_event(request, "connected", "null")

+         log.debug("There is now {count} subscribers to {target}.",

+                   target=target, count=len(self.subscribers[target]))

+ 

+         return server.NOT_DONE_YET

+ 

+     def broadcast(self, target, event, data):

+         log.debug("Received {event} message to broadcast to {target}: {data}",

+                   target=target, event=event, data=data)

+         target_subscribers = self.subscribers.get(target, [])

+         if not target_subscribers:

+             return

+         log.debug("Broadcasting to {num} requests.",

+                   num=len(target_subscribers))

+         for request in target_subscribers:

+             self._send_event(request, event, data)

+ 

+     def keepalive(self):

+         all_subscribers = []

+         for subs in self.subscribers.values():

+             all_subscribers.extend(subs)

+         if not all_subscribers:

+             return

+         log.debug("Broadcasting a ping to {num} requests.",

+                   num=len(all_subscribers))

+         for request in all_subscribers:

+             self._send_event(request, "ping", "null")

+ 

+     def _send_event(self, request, event, data):

+         if event:

+             request.write("event: {}\r\n".format(event).encode("utf-8"))

+         request.write("data: {}\r\n".format(data).encode("utf-8"))

+         # The last CRLF is required to dispatch the event to the client.

+         request.write(b"\r\n")

+ 

+     def request_closed(self, err, request, target):

+         log.debug("Removing subscriber {subscriber} from {target}.",

+                   subscriber=request, target=target)

+         self.subscribers[target].remove(request)

+         log.debug("There is now {count} subscribers to {target}.",

+                   target=target, count=len(self.subscribers[target]))

+ 

+ 

+ class RedisClientService(service.Service):

+ 

+     def __init__(self, broadcaster):

+         self.broadcaster = broadcaster

+         self.queue = None

+         self._stopping = False

+ 

+     @defer.inlineCallbacks

+     def process(self):

+         """

+         Get the data from the Redis queue and broadcast it.

+         """

+ 

+         backlog = yield self.queue.length()

+         log.info("Starting Redis task dispatcher with a backlog of {num} "

+                  "messages.", num=backlog)

+         while True:

+             task = yield self.queue.wait()

+             if task is None:

+                 if self._stopping:

+                     break

+                 log.debug("No task received, or connection lost.")

+                 continue

+             try:

+                 task = json.loads(task)

+             except ValueError:

+                 log.warning("Invalid task received: {task!r}", task=task)

+                 continue

+             log.debug("Task received: {task!r}", task=task)

+             if "target" not in task or "data" not in task:

+                 log.warning("Invalid task received: {task!r}", task=task)

+                 continue

+             self.broadcaster.broadcast(

+                 task["target"], task.get("event"), task["data"])

+ 

+     def startService(self):

+         queue_name = fedmsg_config['hubs.sse.redis-queue-name']

+         self.queue = RetaskQueue(queue_name)

+         return reactor.callLater(0, self.process)

+ 

+     def stopService(self):

+         self._stopping = True

+         return self.queue.disconnect()

+ 

+ 

+ def main():

+     @provider(ILogObserver)

+     def printingObserver(event):

+         print(formatEvent(event))

+     globalLogBeginner.beginLoggingTo(

+         [printingObserver], redirectStandardIO=False)

+     # Web service

+     sub = Subscribe()

+     site = server.Site(sub)

+     port = int(hubs.app.app.config.get("SSE_URL", {}).get("port", 8080))

+     reactor.listenTCP(port, site)

+     # Redis client

+     redisclient = RedisClientService(sub)

+     redisclient.startService()

+     # Keepalive ping

+     keepalive = task.LoopingCall(sub.keepalive)

+     keepalive.start(30)

+     # Start the reactor

+     reactor.run()

+ 

+ 

+ if __name__ == "__main__":

+     main()

@@ -0,0 +1,43 @@ 

+ # You can run this .tac file directly with:

+ #    twistd -ny hubs/backend/sse_server.tac

+ 

+ """

+ This is an example .tac file which starts a webserver on port 8080 and

+ serves files from the current working directory.

+ 

+ The important part of this, the part that makes it a .tac file, is

+ the final root-level section, which sets up the object called 'application'

+ which twistd will look for

+ """

+ 

+ from __future__ import unicode_literals

+ 

+ from twisted.application import service, internet

+ from twisted.internet import reactor, task

+ from twisted.web import server

+ 

+ import hubs.app

+ from hubs.backend.sse_server import Subscribe, RedisClientService

+ 

+ 

+ sse_config = hubs.app.app.config.get("SSE_URL", {})

+ WEB_PORT = int(sse_config.get("port", 8080))

+ 

+ 

+ # this is the core part of any tac file, the creation of the root-level

+ # application object

+ application = service.Application("Hubs SSE Server")

+ 

+ # Web server

+ broadcaster = Subscribe()

+ site = server.Site(broadcaster)

+ web_service = internet.TCPServer(WEB_PORT, site)

+ web_service.setServiceParent(application)

+ 

+ # Redis client

+ redis_service = RedisClientService(broadcaster)

+ redis_service.setServiceParent(application)

+ 

+ # Keepalive ping

+ keepalive = task.LoopingCall(broadcaster.keepalive)

+ keepalive.start(30)

file modified
+49 -13
@@ -21,6 +21,7 @@ 

  

  from __future__ import unicode_literals

  

+ import argparse

  import json

  import logging

  import logging.config
@@ -30,6 +31,7 @@ 

  import retask.queue

  

  import hubs.app

+ import hubs.feed

  import hubs.models

  import hubs.widgets.base

  
@@ -39,7 +41,7 @@ 

  session = hubs.app.session

  

  

- def triage(outbound, msg):

+ def triage(msg):

      topic = msg['topic']

  

      for suffix in fedmsg_config['hubs.junk_suffixes']:
@@ -47,14 +49,23 @@ 

              log.debug("  Dropping junk %r", topic)

              return

  

-     # Get a list of all our widgets:

-     log.debug("Querying for all widgets.")

-     widgets = session.query(hubs.models.Widget).all()

+     # Handle user notifications.

+     # XXX: use the proper topic pattern when it's chosen

+     if ".fmn.notification." in topic:

+         yield retask.task.Task(json.dumps({

+             'type': 'notification',

+             'msg': msg,

+         }))

+         return  # Notifications don't appear anywhere else, stop here.

  

-     # Randomize so that all the triage daemons work on widgets in different

-     # orders.  This should hopefully prevent cache thrashing.

-     log.debug("Randomizing list of all widgets.")

-     random.shuffle(widgets)

+     # Store the list of concerned hubs to check later in the

+     # should_invalidate() method of Feed widgets.

+     msg["_hubs"] = hubs.feed.get_hubs_for_msg(msg)

+ 

+     # Store the message for the Feed widgets.

+     hubs.feed.on_new_message(msg)

+ 

+     widgets = get_widgets()

  

      def is_widget_update(msg, widget):

          """Always rebuild the cache when the widget config is updated."""
@@ -64,19 +75,44 @@ 

      log.debug("Checking should_invalidate for all widgets.")

      for widget in widgets:

          for fn_name, fn_class in widget.module.get_cached_functions().items():

-             if is_widget_update(msg, widget) or \

-                     fn_class(widget).should_invalidate(msg):

+             # log.debug("Checking %s:%s", widget.plugin, widget.idx)

+             fn = fn_class(widget)

+             if is_widget_update(msg, widget) or fn.should_invalidate(msg):

                  yield retask.task.Task(json.dumps({

+                     'type': 'widget-cache',

                      'idx': widget.idx,

+                     'hub': widget.hub.name,

                      'fn_name': fn_name,

                      'msg_id': msg['msg_id'],

                  }))

  

  

+ def get_widgets():

+     # Get a list of all our widgets:

+     log.debug("Querying for all widgets.")

+     widgets = session.query(hubs.models.Widget).all()

+     # Randomize so that all the triage daemons work on widgets in different

+     # orders.  This should hopefully prevent cache thrashing.

+     log.debug("Randomizing list of all widgets.")

+     random.shuffle(widgets)

+     # TODO: cache the widgets list until a widget is added or deleted?

+     return widgets

+ 

+ 

+ def parse_args(args):

+     parser = argparse.ArgumentParser(

+         description='Triage messages from the bus.')

+     parser.add_argument("-d", "--debug", action="store_true",

+                         help="debugging output level.")

+     return parser.parse_args()

+ 

+ 

  def main(args=None):

      args = args if args is not None else sys.argv

+     args = parse_args(args)

      logging.config.dictConfig(fedmsg_config['logging'])

-     logging.basicConfig()

+     log_level = logging.DEBUG if args.debug else logging.INFO

+     logging.basicConfig(level=log_level)

  

      # XXX - for flask.url_for to work

      hubs.app.app.config['SERVER_NAME'] = '0.0.0.0:5000'
@@ -97,12 +133,12 @@ 

          while True:

              task = inbound.wait()  # Wait forever...  timeout is optional.

              msg = json.loads(task.data)

-             log.info(

+             log.debug(

                  "(triage backlog: %r, work backlog: %r)  Working on %r %r",

                  inbound.length, outbound.length, msg['msg_id'], msg['topic'],

              )

              with hubs.app.app.app_context():  # so url_for works

-                 for task in triage(outbound, msg):

+                 for task in triage(msg):

                      log.debug("  Triage sending %r to workers." % task)

                      outbound.enqueue(task)

              log.debug("  Done with triage of %r.", msg['msg_id'])

file modified
+39 -7
@@ -26,14 +26,17 @@ 

  

  from __future__ import unicode_literals

  

+ import argparse

  import json

  import logging

  import logging.config

  import sys

  

+ import fedmsg.meta

  import retask.queue

  

  import hubs.app

+ import hubs.feed

  import hubs.models

  import hubs.widgets.base

  
@@ -43,21 +46,19 @@ 

  session = hubs.app.session

  

  

- def work(widget_idx, fn_name):

+ def widget_cache_work(widget_idx, fn_name):

      # Get a real widget, because we update last_refreshed on it.

      widget = hubs.models.Widget.by_idx(widget_idx)

      fn_class = widget.module.get_cached_functions()[fn_name]

      log.info("! Rebuilding cache for %r:%r" % (widget, fn_name))

      # Invalidate the cache...

      fn_class(widget).rebuild()

-     # TODO -- fire off an EventSource notice that we updated stuff

  

  

- def handle(idx, fn_name):

- 

+ def handle_widget_cache(widget_idx, fn_name):

      try:

          with hubs.app.app.app_context():  # so url_for works

-             work(idx, fn_name)

+             widget_cache_work(widget_idx, fn_name)

          session.commit()  # transaction is committed here

      except:

          session.rollback()  # rolls back the transaction
@@ -66,10 +67,19 @@ 

          session.close()

  

  

+ def parse_args(args):

+     parser = argparse.ArgumentParser(description='Rebuild widget caches.')

+     parser.add_argument("-d", "--debug", action="store_true",

+                         help="debugging output level.")

+     return parser.parse_args()

+ 

+ 

  def main(args=None):

      args = args if args is not None else sys.argv

+     args = parse_args(args)

      logging.config.dictConfig(fedmsg_config['logging'])

-     logging.basicConfig()

+     log_level = logging.DEBUG if args.debug else logging.INFO

+     logging.basicConfig(level=log_level)

  

      # XXX - for flask.url_for to work

      hubs.app.app.config['SERVER_NAME'] = '0.0.0.0:5000'
@@ -78,6 +88,10 @@ 

      log.info("Worker starting, connecting to retask queue %r." % name)

      queue = retask.queue.Queue(name)

      queue.connect()

+     sse_name = fedmsg_config['hubs.sse.redis-queue-name']

+     log.info("Worker starting, connecting to retask queue %r." % sse_name)

+     sse_queue = retask.queue.Queue(sse_name)

+     sse_queue.connect()

  

      try:

          log.info("Entering work loop.")
@@ -85,7 +99,25 @@ 

              task = queue.wait()  # Wait forever...  timeout is optional.

              log.info("Working on %r.  (backlog is %r)" % (task, queue.length))

              item = json.loads(task.data)

-             handle(item['idx'], item['fn_name'])

+             sse_task = None

+             item_type = item.get("type", "widget-cache")

+             if item_type == "widget-cache":

+                 handle_widget_cache(item['idx'], item['fn_name'])

+                 sse_task = {

+                     "event": "hubs:widget-updated",

+                     "data": item["idx"],

+                     "target": "hub/{}".format(item["hub"]),

+                 }

+             elif item_type == "notification":

+                 hubs.feed.on_new_notification(item["msg"])

+                 username = fedmsg.meta.msg2agent(item["msg"])

+                 sse_task = {

+                     "event": "hubs:new-notification",

+                     "data": username,

+                     "target": "user/{}".format(username),

+                 }

+             if sse_task is not None:

+                 sse_queue.enqueue(retask.task.Task(json.dumps(sse_task)))

              log.debug("  Done.")

      except KeyboardInterrupt:

          pass

file modified
+5 -1
@@ -11,7 +11,11 @@ 

  

  HUB_OF_THE_MONTH = 'commops'

  

- SSE_URL = 'http://localhost:8080/user/'

+ SSE_URL = {

+     # "host": "sse.example.com",

+     "port": "8080",

+     # "path": "/sse",

+ }

  PLUS_PLUS_URL = 'http://localhost:5001/user/'

  PLUS_PLUS_TOKEN = 'thisismytoken'

  

@@ -9,6 +9,7 @@ 

      # For communication between fedmsg-hub and the cache worker farm

      'hubs.redis.triage-queue-name': 'fedora-hubs-triage-queue',

      'hubs.redis.work-queue-name': 'fedora-hubs-work-queue',

+     'hubs.sse.redis-queue-name': 'fedora-hubs-sse-queue',

  

      # Junk junk junk.. that we don't care about.  Drop it!

      'hubs.junk_suffixes': [

file added
+210
@@ -0,0 +1,210 @@ 

+ from __future__ import unicode_literals

+ 

+ import hashlib

+ import logging

+ import os

+ import re

+ 

+ import fedmsg.meta

+ import flask

+ import redis

+ from fedmsg.encoding import loads, dumps

+ 

+ import hubs.app

+ from hubs.models import Hub, User, Association

+ 

+ 

+ log = logging.getLogger(__name__)

+ 

+ 

+ def get_hubs_for_msg(msg):

+     hubs = []

+     for username in fedmsg.meta.msg2usernames(msg):

+         user = User.query.get(username)

+         # Only act on existing users

+         if user is None:

+             log.debug("Message concerning an unknown user: %s", username)

+             continue

+         hubs.append(username)

+         group_hubs = Hub.query.filter(

+                 Hub.user_hub == False,  # noqa:E712

+             ).join(Association).filter(

+                 Association.user == user,

+                 Association.role.in_(["owner", "member"]),

+             )

+         hubs.extend([result[0] for result in group_hubs.values(Hub.name)])

+     return hubs

+ 

+ 

+ def on_new_notification(msg):

+     """Only for FMN messages."""

+     username = fedmsg.meta.msg2agent(msg)

+     user = User.query.get(username)

+     # Only act on existing users

+     if user is None:

+         log.debug("Notification for an unknown user: %s", username)

+         return

+     # Users may exist without their hub if they have never logged

+     # in but are just added to the members list.  Don't check that

+     # the user Hub actually exists, it will be created when the user

+     # logs in, and this way the feed will be already populated.

+     log.debug("Received a notification concerning %s", username)

+     feed = Notifications(username)

+     feed.add(msg)

+ 

+ 

+ def on_new_message(msg):

+     for hub_name in msg["_hubs"]:

+         log.debug("Received a feed item for hub %s", hub_name)

+         feed = Activity(hub_name)

+         feed.add(msg)

+ 

+ 

+ def format_msg(msg):

+     if "msg_ids" not in msg:

+         msg = fedmsg.meta.conglomerate([msg])[0]

+     # Compute a deterministic dom_id.  Since this is the identifier stored

+     # when the message is saved by the user in the SQL DB, it has to be

+     # invariant through conglomerate() calls.

+     msg["dom_id"] = hashlib.sha1(

+         b":".join(

+             [mid.encode("utf-8") for mid in sorted(msg["msg_ids"])]

+         )).hexdigest()

+     # TODO: generate markup

+     msg["markup"] = _make_hub_links(msg, "subtitle")

+     msg["markup_subjective"] = _make_hub_links(msg, "subjective")

+     return msg

+ 

+ 

+ _word_split_re = re.compile(r'(\s+)')

+ _punctuation_re = re.compile(

+     '^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % (

+         '|'.join(map(re.escape, ('(', '<', '&lt;'))),

+         '|'.join(map(re.escape, ('.', ',', ')', '>', '\n', '&gt;', "'s")))

+     )

+ )

+ 

+ 

+ def _make_hub_links(msg, attr):

+     if not msg.get(attr):

+         return ""

+     existing_usernames = [r[0] for r in User.query.values(User.username)]

+     usernames = [u for u in msg["usernames"] if u in existing_usernames]

+     words = _word_split_re.split(msg[attr])

+     for i, word in enumerate(words):

+         match = _punctuation_re.match(word)

+         if match:

+             lead, middle, trail = match.groups()

+             if middle in usernames:

+                 middle = '<a href="{}">{}</a>'.format(

+                     flask.url_for("hub", name=middle), middle)

+             if lead + middle + trail != word:

+                 words[i] = lead + middle + trail

+     return ''.join(words)

+ 

+ 

+ class Feed(object):

+ 

+     max_items = 100

+     connection_pool = None

+     msgtype = None

+ 

+     def __init__(self, owner):

+         """

+         Args:

+             owner (str): User name or Hub name.

+         """

+         if self.msgtype is None:

+             raise NotImplementedError(

+                 "You must subclass Feed and set self.msgtype.")

+         self.owner = owner

+         self.db = None

+         fedmsg_config = hubs.app.fedmsg_config

+         self.db_config = {

+             'host': fedmsg_config.get('hubs.redis.host', 'localhost'),

+             'port': fedmsg_config.get('hubs.redis.port', 6379),

+             'db': fedmsg_config.get('hubs.redis.db', 0),

+             'password': fedmsg_config.get('hubs.redis.password', None),

+             }

+         self.key = self._get_key()

+ 

+     def _get_key(self):

+         parts = ["feed", self.msgtype, self.owner]

+         key_prefix = hubs.app.fedmsg_config.get('hubs.redis.feed-prefix')

+         if key_prefix:

+             parts.insert(0, key_prefix)

+         return "|".join(parts)

+ 

+     def get(self, start=0, end=None):

+         end = end or -1

+         if self.db is None:

+             self.connect()

+         return [

+             loads(item.decode("utf-8")) for item in

+             self.db.lrange(self.key, start, end)

+             ]

+ 

+     def add(self, msg):

+         if self.db is None:

+             self.connect()

+         log.debug("Adding message %s to %s", msg["msg_id"], self.key)

+         self.db.lpush(self.key, dumps(self._format_msg(msg)))

+         self.db.ltrim(self.key, 0, self.max_items)

+ 

+     def length(self):

+         if self.db is None:

+             self.connect()

+         return self.db.llen(self.key)

+ 

+     __len__ = length

+ 

+     def connect(self):

+         if self.connection_pool is None:

+             self.connection_pool = redis.ConnectionPool(**self.db_config)

+         self.db = redis.Redis(connection_pool=self.connection_pool)

+         try:

+             self.db.info()

+         except redis.exceptions.ConnectionError:

+             log.warning("Could not connect to Redis")

+             self.db = None

+             raise  # XXX: do better?

+ 

+     def close(self):

+         try:

+             self.db.close()

+         except redis.exceptions.ConnectionError:

+             log.warning("Could not disconnect from Redis")

+             raise  # XXX: do better?

+         self.db = None

+         self.connection_pool = None

+ 

+ 

+ class Notifications(Feed):

+ 

+     msgtype = "notif"

+ 

+     def _format_msg(self, msg):

+         return format_msg(msg)

+ 

+ 

+ class Activity(Feed):

+ 

+     msgtype = "activity"

+ 

+     def _format_msg(self, msg):

+         # No-op: keep raw messages in the DB so they can be conglomerated

+         # and formatted later.

+         return msg

+ 

+ 

+ def _load_json(username):

+     """This loads dummy data in the feeds."""

+     here = os.path.dirname(os.path.abspath(__file__))

+     with open(os.path.join(here, 'actions.json')) as fp:

+         msgs = loads(fp.read())

+     for feed_class in (Notifications, Activity):

+         feed = feed_class(username)

+         if feed.db is None:

+             feed.connect()

+         for msg in msgs:

+             feed.db.rpush(feed.key, dumps(msg))

@@ -1,34 +0,0 @@ 

- /* eslint-env jasmine, jest */

- 

- jest.unmock('../widgets/feed/Feed.jsx');

- 

- import React from 'react';

- import TestUtils from 'react-addons-test-utils';

- 

- import Feed from '../widgets/feed/Feed.jsx';

- 

- describe('Feed', () => {

-   const matches = [{

-     markup: '<a href="https://pagure.io/fedora-hubs">Link</a>',

-     date_time: new Date(),

-     link: 'https://pagure.io/fedora-hubs',

-     secondary_icon: 'https://placekitten.com/g/200/300',

-   }];

- 

-   const options = {

-     messageLimit: 100

-   };

- 

-   it('should set the correct state', () => {

-     const component = TestUtils.renderIntoDocument(

-       <Feed matches={matches} options={options} />

-     );

- 

-     const state = {

-       matches,

-       messageLimit: 100,

-       sse: true,

-     };

-     expect(component.state).toEqual(state);

-   });

- });

@@ -1,71 +0,0 @@ 

- import React from 'react';

- 

- export default class Dropdown extends React.Component {

-   save() {

-     const payload = {

-       link: this.props.match.link,

-       markup: this.props.match.markup,

-       secondary_icon: this.props.match.secondary_icon,

-       dom_id: this.props.match.dom_id,

-     };

- 

-     $.ajax({

-       type: 'POST',

-       url: this.props.options.saveUrl,

-       data: JSON.stringify(payload),

-       contentType: 'application/json',

-     }).done(() => {

-       const id = `#save-${this.props.match.dom_id}`;

-       const $saveBtn = $(id);

-       $saveBtn.removeClass('btn-primary').addClass('btn-success');

-       $saveBtn.text('Saved');

-     });

-   }

- 

-   delete() {

-     $.ajax({

-       type: 'DELETE',

-       url: `${this.props.options.saveUrl}${this.props.match.idx}/`,

-     }).done((resp) => {

-       if (resp.status_code === 200) {

-         const $notification = $(`#${this.props.match.dom_id}`);

-         $notification.closest('.card-block').remove();

-       }

-     });

-   }

- 

-   render() {

-     let saveBtn;

-     if (!this.props.match.saved && this.props.options.saveUrl) {

-       saveBtn = (

-         <button

-           id={`save-${this.props.match.dom_id}`}

-           className="btn btn-sm btn-primary"

-           onClick={this.save.bind(this)}

-           href="#"

-         >

-           Save

-         </button>

-       );

-     }

-     let deleteBtn;

-     if (this.props.options.delete && this.props.options.saveUrl) {

-       deleteBtn = (

-         <button

-           id={`delete-${this.props.match.dom_id}`}

-           className="btn btn-sm btn-danger"

-           onClick={this.delete.bind(this)}

-         >

-           Remove

-         </button>

-       );

-     }

- 

-     return (

-       <div className="float-right">

-        {saveBtn}

-         {deleteBtn}

-       </div>

-     );

-   }

- }

@@ -1,28 +0,0 @@ 

- import React from 'react';

- import Dropdown from './Dropdown.jsx';

- import TimeAgo from 'react-timeago';

- 

- 

- export default class Markup extends React.Component {

-   createMarkup() {

-     return { __html: this.props.match.markup };

-   }

-   render() {

-     const timestamp = this.props.match.date_time ? (<TimeAgo date={this.props.match.date_time} />) : null;

- 

-     return (

-       <div className="media-body">

-         <h4

-           className="media-heading"

-           dangerouslySetInnerHTML={this.createMarkup()}

-         >

-         </h4>

-         {timestamp}

-         <Dropdown

-           match={this.props.match}

-           options={this.props.options}

-         />

-       </div>

-     );

-   }

- }

@@ -1,19 +0,0 @@ 

- import React from 'react';

- 

- import Icon from './Icon.jsx';

- import Markup from './Markup.jsx';

- 

- class Panel extends React.Component {

-   render() {

-     return (

-       <div className="card">

-         <div className="card-block" id={this.props.match.id}>

-           <Icon match={this.props.match} />

-           <Markup options={this.props.options} match={this.props.match} />

-         </div>

-       </div>

-     );

-   }

- }

- 

- export default Panel;

@@ -0,0 +1,39 @@ 

+ import React from 'react';

+ 

+ 

+ export default class Actions extends React.Component {

+ 

+   render() {

+     var buttonProps = {

+       id: `save-${this.props.item.dom_id}`,

+       className: "btn btn-sm ",

+     };

+     var buttonText;

+ 

+     if (this.props.item.saved) {

+       buttonProps.className += "btn-success";

+       buttonProps.onClick = () => {

+         this.props.handleUnsave(this.props.item);

+       };

+       buttonText = "Saved";

+     } else {

+       buttonProps.className += "btn-primary";

+       buttonProps.onClick = () => {

+         this.props.handleSave(this.props.item);

+       };

+       buttonText = "Save";

+     }

+ 

+     return (

+       <div className="float-right">

+         <button {...buttonProps}>

+           {buttonText}

+         </button>

+       </div>

+     );

+   }

+ 

+ }

+ 

+ 

+ // vim: set ts=2 sw=2 et:

@@ -0,0 +1,52 @@ 

+ import React from 'react';

+ import PropTypes from 'prop-types';

+ import {

+   IntlProvider,

+   defineMessages,

+   FormattedMessage,

+   } from 'react-intl';

+ import Panel from './Panel.jsx';

+ 

+ 

+ const messages = defineMessages({

+   no_items: {

+     id: "hubs.components.feed.no_items",

+     defaultMessage: "No items yet.",

+   },

+ });

+ 

+ 

+ export default class Feed extends React.Component {

+ 

+   render() {

+     var items = this.props.items || [];

+     items = items.map((item, idx) => {

+       return (

+         <Panel item={item} {...this.props} key={item.dom_id} />

+         );

+     });

+     return (

+       <IntlProvider locale={navigator.language}>

+         <div className="component-feed">

+           { (items.length == 0 && this.props.loaded) ?

+             <FormattedMessage {...messages.no_items} tagName="i" />

+             :

+             items

+           }

+         </div>

+       </IntlProvider>

+     );

+   }

+ 

+ }

+ 

+ 

+ Feed.propTypes = {

+   loaded: PropTypes.bool,

+ };

+ Feed.defaultProps = {

+   loaded: true,

+ };

+ 

+ 

+ // vim: set ts=2 sw=2 et:

hubs/static/client/app/components/feed/Icon.jsx hubs/static/client/app/components/Icon.jsx
file renamed
+13 -13
@@ -1,21 +1,21 @@ 

  import React from 'react';

  

- class Icon extends React.Component {

+ export default class Icon extends React.Component {

    render() {

      return (

-       <div>

-         <div className="media-left">

-           <a href={this.props.match.link ? this.props.match.link : '#'} target="_blank">

-             <img

-               alt="User avatar"

-               className="media-object square-32 img-circle"

-               src={this.props.match.secondary_icon}

-             />

-           </a>

-         </div>

-       </div>

+       <a href={this.props.item.link ? this.props.item.link : '#'}

+          target="_blank"

+          className="d-flex align-self-start mr-3"

+          >

+         <img

+           alt="User avatar"

+           className="square-32 img-circle"

+           src={this.props.item.secondary_icon}

+         />

+       </a>

      );

    }

  }

  

- export default Icon;

+ 

+ // vim: set ts=2 sw=2 et:

@@ -0,0 +1,133 @@ 

+ import React from 'react';

+ 

+ 

+ export default class ItemsGetter extends React.Component {

+ 

+   constructor(props) {

+     super(props);

+     this.state = {

+       sseError: false,

+       loading: false,

+     };

+     this.sseSource = null;

+     this.setupSSESource = this.setupSSESource.bind(this);

+     this.tearDownSSESource = this.tearDownSSESource.bind(this);

+     this.handleSSEEvent = this.handleSSEEvent.bind(this);

+     this.handleSSEEventError = this.handleSSEEventError.bind(this);

+     this.handleSSEEventOpen = this.handleSSEEventOpen.bind(this);

+     this.loadFromServer = this.loadFromServer.bind(this);

+   }

+ 

+   componentDidMount() {

+     this.setupSSESource();

+     this.loadFromServer();

+   }

+ 

+   componentWillUnmount() {

+     this.serverRequest.abort();

+     this.tearDownSSESource();

+     window.removeEventListener("beforeunload", this.tearDownSSESource);

+   }

+ 

+   setupSSESource() {

+     if (!this.props.sse ||

+         !this.props.sse.eventName ||

+         !this.props.sse.shouldReload) {

+       // Auto-update is disabled.

+       return;

+     }

+ 

+     this.sseSource = document.sseSource;

+     if (!this.sseSource || this.sseSource.readyState === EventSource.CLOSED) {

+       this.handleSSEEventError();

+       return;

+     }

+ 

+     this.sseSource.addEventListener('open', this.handleSSEEventOpen);

+     this.sseSource.addEventListener('error', this.handleSSEEventError);

+     this.sseSource.addEventListener(

+       this.props.sse.eventName, this.handleSSEEvent);

+     // This is necessary to avoid the error message being displayed right

+     // before the user changes page.

+     // Pretty much like: https://bugzilla.mozilla.org/show_bug.cgi?id=833462

+     window.addEventListener("beforeunload", this.tearDownSSESource);

+   }

+ 

+   tearDownSSESource() {

+     if (!this.sseSource) { return; }

+     this.sseSource.removeEventListener(

+       this.props.sse.eventName, this.handleSSEEvent);

+     this.sseSource.removeEventListener('error', this.handleSSEEventError);

+     this.sseSource = null;

+   }

+ 

+   handleSSEEvent(e) {

+     if (this.props.sse.shouldReload(e.data)) {

+       this.loadFromServer();

+     }

+   }

+ 

+   handleSSEEventOpen(e) {

+     this.setState({sseError: false});

+   }

+ 

+   handleSSEEventError(e) {

+     console.log(e);

+     if (this.sseSource.readyState === 2) {

+       // Connection closed.

+       this.setState({

+         sseError: ("Cannot auto-update the feed, you will have to refresh "

+                   +"the page manually to see new elements.")

+       });

+     } else if (this.sseSource.readyState === 0) {

+       // Reconnecting

+       this.setState({

+         sseError: "Reconnecting to the auto-update source...",

+       });

+     }

+   }

+ 

+   loadFromServer() {

+     this.setState({loading: true});

+     if (this.serverRequest &&

+         this.serverRequest.readyState !== XMLHttpRequest.DONE) {

+       this.serverRequest.abort();

+     }

+     this.serverRequest = $.ajax({

+       url: this.props.url,

+       method: 'GET',

+       dataType: 'json',

+       cache: false,

+       success: (data, textStatus, jqXHR) => {

+         this.props.handleData(data.data);

+       },

+       error: (xhr, status, err) => {

+         console.error(status, err.toString());

+       },

+       complete: (xhr, status) => {

+         this.setState({loading: false});

+       },

+     });

+   }

+ 

+   render() {

+     return (

+       <div>

+         {this.props.children}

+         <div

+           className="loading mx-auto"

+           style={{display: this.state.loading ? "block" : "none"}}

+           ></div>

+         { (this.state.sseError !== false) &&

+           <div className="alert alert-warning mt-3">

+             {this.state.sseError}

+           </div>

+         }

+       </div>

+     );

+   }

+ 

+ }

+ 

+ 

+ // vim: set ts=2 sw=2 et:

@@ -0,0 +1,53 @@ 

+ import React from 'react';

+ import TimeAgo from 'react-timeago';

+ 

+ 

+ export default class Markup extends React.Component {

+ 

+   constructor(props) {

+     super(props);

+     this.createMarkup = this.createMarkup.bind(this);

+   }

+ 

+   createMarkup(item) {

+     return { __html: item.markup || item.subtitle };

+   }

+ 

+   render() {

+     const timestamp = this.props.item.end_time || this.props.item.timestamp;

+     const submsgs = this.props.item.msg_ids;

+     var submessages = [];

+     if (submsgs && Object.keys(submsgs).length > 1) {

+       submessages = Object.keys(submsgs).map((msgid) => {

+         return (

+           <li key={msgid}>

+             <small dangerouslySetInnerHTML={this.createMarkup(submsgs[msgid])}></small>

+           </li>

+         );

+       });

+     }

+ 

+     return (

+       <div className="media-body align-self-start">

+         <span dangerouslySetInnerHTML={this.createMarkup(this.props.item)}></span>

+         {timestamp &&

+           <small className="text-muted ml-3">

+             <TimeAgo date={timestamp * 1000} />

+           </small>

+         }

+         {submessages.length !== 0 &&

+           <ul

+             className="submessages"

+             style={{display: this.props.detailsOpened ? "block" : "none" }}

+             >

+             {submessages}

+           </ul>

+         }

+       </div>

+     );

+   }

+ 

+ }

+ 

+ 

+ // vim: set ts=2 sw=2 et:

@@ -0,0 +1,52 @@ 

+ import React from 'react';

+ import Icon from './Icon.jsx';

+ import Markup from './Markup.jsx';

+ import Actions from './Actions.jsx';

+ 

+ 

+ export default class Panel extends React.Component {

+ 

+   constructor(props) {

+     super(props);

+     this.state = {

+       detailsOpened: false,

+     }

+     this.toggleDetails = this.toggleDetails.bind(this);

+   }

+ 

+   toggleDetails(e) {

+     e.preventDefault();

+     this.setState((prevState, props) => (

+       {detailsOpened: !prevState.detailsOpened}

+     ));

+   }

+ 

+   render() {

+     return (

+       <div className="media py-3" id={this.props.item.id}>

+         <Icon item={this.props.item} />

+         <Markup

+           {...this.props}

+           detailsOpened={this.state.detailsOpened}

+           />

+         <div className="d-flex flex-column align-self-start ml-3 text-right">

+           { (this.props.handleSave || this.props.handleUnsave) &&

+             <Actions {...this.props} />

+           }

+           { (this.props.item.msg_ids &&

+              Object.keys(this.props.item.msg_ids).length > 1) &&

+             <div className="mt-auto">

+               <small>

+                 <a href="#" onClick={this.toggleDetails}>details</a>

+               </small>

+             </div>

+           }

+         </div>

+       </div>

+     );

+   }

+ 

+ }

+ 

+ 

+ // vim: set ts=2 sw=2 et:

@@ -0,0 +1,34 @@ 

+ /* eslint-env jasmine, jest */

+ 

+ jest.unmock('../Feed.jsx');

+ 

+ import React from 'react';

+ import ReactDOM from 'react-dom';

+ import TestUtils from 'react-addons-test-utils';

+ 

+ import Feed from '../Feed.jsx';

+ import Panel from '../Panel.jsx';

+ 

+ describe('Feed', () => {

+   const item = {

+     markup: '<a href="https://pagure.io/fedora-hubs">Link</a>',

+     date_time: new Date(),

+     link: 'https://pagure.io/fedora-hubs',

+     secondary_icon: 'https://placekitten.com/g/200/300',

+   }

+   var items = [item, item, item];

+   items = items.map((obj, idx) => {

+     return Object.assign({dom_id: "item-" + idx}, obj);

+   });

+ 

+   it('should create the children', () => {

+     const component = TestUtils.renderIntoDocument(

+       <Feed items={items} />

+     );

+     const node = ReactDOM.findDOMNode(component);

+     expect(Panel.mock.calls.length).toEqual(3);

+   });

+ });

+ 

+ 

+ // vim: set ts=2 sw=2 et:

hubs/static/client/app/components/feed/__tests__/Icon.test.js hubs/static/client/app/__tests__/Icon.test.js
file renamed
+10 -8
@@ -1,36 +1,38 @@ 

  /* eslint-env jasmine, jest */

  

- jest.unmock('../components/Icon.jsx');

+ jest.unmock('../Icon.jsx');

  

  import React from 'react';

  import ReactDOM from 'react-dom';

  import TestUtils from 'react-addons-test-utils';

  

- import Icon from '../components/Icon.jsx';

+ import Icon from '../Icon.jsx';

  

  describe('Icon', () => {

-   const match = {

+   const item = {

      link: 'https://pagure.io/fedora-hubs',

      secondary_icon: 'https://placekitten.com/g/200/300',

    };

  

    it('has the correct link', () => {

      const component = TestUtils.renderIntoDocument(

-       <Icon match={match} />

+       <Icon item={item} />

      );

      expect(component).toBeTruthy();

      const node = ReactDOM.findDOMNode(component);

-     const link = node.querySelector('a');

-     expect(link.getAttribute('href')).toEqual(match.link);

+     expect(node.getAttribute('href')).toEqual(item.link);

    });

    it('has the correct image source', () => {

      const component = TestUtils.renderIntoDocument(

-       <Icon match={match} />

+       <Icon item={item} />

      );

      expect(component).toBeTruthy();

  

      const node = ReactDOM.findDOMNode(component);

      const image = node.querySelector('img');

-     expect(image.getAttribute('src')).toEqual(match.secondary_icon);

+     expect(image.getAttribute('src')).toEqual(item.secondary_icon);

    });

  });

+ 

+ 

+ // vim: set ts=2 sw=2 et:

hubs/static/client/app/components/feed/__tests__/Markup.test.js hubs/static/client/app/__tests__/Markup.test.js
file renamed
+9 -6
@@ -1,30 +1,33 @@ 

  /* eslint-env jasmine, jest */

  

- jest.unmock('../components/Markup.jsx');

+ jest.unmock('../Markup.jsx');

  

  import React from 'react';

  import ReactDOM from 'react-dom';

  import TestUtils from 'react-addons-test-utils';

  

- import Markup from '../components/Markup.jsx';

+ import Markup from '../Markup.jsx';

  

  describe('Markup', () => {

-   const match = {

+   const item = {

      markup: '<a href="https://pagure.io/fedora-hubs">Link</a>',

      date_time: new Date(),

    };

  

    it('should set the inner html of the markup', () => {

      const component = TestUtils.renderIntoDocument(

-       <Markup match={match} />

+       <Markup item={item} />

      );

      expect(component).toBeTruthy();

  

      const node = ReactDOM.findDOMNode(component);

-     const header = node.querySelector('h4');

+     const markup = node.querySelector('span');

  

-     expect(header.innerHTML).toEqual(match.markup);

+     expect(markup.innerHTML).toEqual(item.markup);

    });

  

    // we don't test TimeAgo, the library should have its own tests

  });

+ 

+ 

+ // vim: set ts=2 sw=2 et:

hubs/static/client/app/components/feed/__tests__/Panel.test.js hubs/static/client/app/__tests__/Panel.test.js
file renamed
+25 -6
@@ -1,30 +1,49 @@ 

  /* eslint-env jasmine, jest */

  

- jest.unmock('../components/Panel.jsx');

+ jest.unmock('../Panel.jsx');

  

  import React from 'react';

  import ReactDOM from 'react-dom';

  import TestUtils from 'react-addons-test-utils';

  

- import Panel from '../components/Panel.jsx';

+ import Panel from '../Panel.jsx';

  

  describe('Panel', () => {

-   const match = {

+   const item = {

      markup: '<a href="https://pagure.io/fedora-hubs">Link</a>',

      date_time: new Date(),

      link: 'https://pagure.io/fedora-hubs',

      secondary_icon: 'https://placekitten.com/g/200/300',

+     msg_ids: {

+       foobar1: {},

+       foobar2: {},

+     },

    };

  

-   it('should render the card block', () => {

+   it('should render the actions block', () => {

      const component = TestUtils.renderIntoDocument(

-       <Panel match={match} />

+       <Panel item={item} />

      );

      expect(component).toBeTruthy();

  

      const node = ReactDOM.findDOMNode(component);

-     const block = node.querySelector('.card-block');

+     const block = node.querySelector('div.ml-3.text-right');

  

      expect(block).toBeTruthy();

    });

+ 

+   it('should show the details link', () => {

+     const component = TestUtils.renderIntoDocument(

+       <Panel item={item} />

+     );

+     expect(component).toBeTruthy();

+ 

+     const node = ReactDOM.findDOMNode(component);

+     const details = node.querySelector('.text-right small a');

+ 

+     expect(details).toBeTruthy();

+   });

  });

+ 

+ 

+ // vim: set ts=2 sw=2 et:

@@ -1,3 +1,4 @@ 

  import HubConfig from './HubConfig.jsx';

+ import Streams from './Streams.jsx';

  

- module.exports = {HubConfig}

+ module.exports = {HubConfig, Streams}

@@ -0,0 +1,167 @@ 

+ import React from 'react';

+ import {

+   IntlProvider,

+   defineMessages,

+   FormattedMessage,

+   } from 'react-intl';

+ import TabSet from '../components/TabSet.jsx';

+ import ItemsGetter from '../components/feed/ItemsGetter.jsx';

+ import Feed from '../components/feed/Feed.jsx';

+ 

+ 

+ export default class Streams extends React.Component {

+ 

+   constructor(props) {

+     super(props);

+     this.state = {

+       notifItems: [],

+       savedItems: [],

+     };

+     this.handleStreamData = this.handleStreamData.bind(this);

+     this.handleSavedData = this.handleSavedData.bind(this);

+     this.handleSave = this.handleSave.bind(this);

+     this.handleUnsave = this.handleUnsave.bind(this);

+   }

+ 

+   handleStreamData(data) {

+     this.setState({notifItems: data});

+   }

+ 

+   handleSavedData(data) {

+     this.setState({savedItems: data});

+   }

+ 

+   handleSave(item) {

+     if (!this.props.urls.saved) { return; }

+     const payload = {

+       link: item.link,

+       markup: item.markup,

+       secondary_icon: item.secondary_icon,

+       dom_id: item.dom_id,

+     };

+     $.ajax({

+       type: 'POST',

+       url: this.props.urls.saved,

+       data: JSON.stringify(payload),

+       contentType: 'application/json',

+       success: (data) => {

+         this.setState((prevState, props) => {

+           prevState.savedItems.push(data.data);

+           return {savedItems: prevState.savedItems};

+         });

+       },

+     });

+   }

+ 

+   handleUnsave(item) {

+     if (!this.props.urls.saved) { return; }

+     var updateSavedItems = (item) => {

+       this.setState((prevState, props) => {

+         var items = prevState.savedItems.filter(

+           (currentItem) => (currentItem.dom_id !== item.dom_id)

+         );

+         return {savedItems: items};

+       });

+     };

+     if (item.idx) {

+       // Already saved

+       $.ajax({

+         type: 'DELETE',

+         url: `${this.props.urls.saved}${item.idx}/`,

+         success: () => { updateSavedItems(item) },

+       });

+     } else {

+       updateSavedItems(item);

+     }

+   }

+ 

+   render() {

+     // Add saved state.

+     var savedItemsIds = this.state.savedItems.map((item) => {

+       return item.dom_id;

+     });

+     var notifs = this.state.notifItems.map((item) => {

+       item.saved = (savedItemsIds.indexOf(item.dom_id) !== -1);

+       return item;

+     });

+ 

+     return (

+       <IntlProvider locale={navigator.language}>

+         <TabSet tabListClass="mb-3">

+           <FeedPanel

+             username={this.props.username}

+             tabTitle="My Stream"

+             >

+             <ItemsGetter

+               url={this.props.urls.content}

+               sse={{

+                 eventName: "hubs:new-notification",

+                 shouldReload: (username) => (username === this.props.username),

+               }}

+               handleData={this.handleStreamData}

+               >

+               <Feed

+                 items={notifs}

+                 handleSave={this.handleSave}

+                 handleUnsave={this.handleUnsave}

+                 />

+             </ItemsGetter>

+           </FeedPanel>

+           <FeedPanel

+             username={this.props.username}

+             tabTitle="My Actions"

+             >

+             <Feed

+               items={this.state.notifItems}

+               />

+           </FeedPanel>

+           <FeedPanel

+             username={this.props.username}

+             tabTitle="Saved Notifications"

+             >

+             <ItemsGetter

+               url={this.props.urls.saved}

+               handleData={this.handleSavedData}

+               >

+               <Feed

+                 items={this.state.savedItems}

+                 handleUnsave={this.handleUnsave}

+                 />

+             </ItemsGetter>

+           </FeedPanel>

+         </TabSet>

+       </IntlProvider>

+     );

+   }

+ }

+ 

+ 

+ class FeedPanel extends React.Component {

+ 

+   render() {

+     const filters_url = `https://apps.fedoraproject.org/notifications/${this.props.username}.id.fedoraproject.org/`;

+ 

+     return (

+       <div>

+         <div className="input-group">

+           <input

+             type="search" className="form-control"

+             placeholder="Search this activity stream..."

+             aria-describedby="searchform-addon" />

+           <span className="input-group-addon">

+             <span><i className="fa fa-search" aria-hidden="true"></i></span>

+           </span>

+         </div>

+         <br/>

+         <div className="alert alert-warning" role="alert">

+           This stream is filtered. <a href={filters_url} target="_blank">View Filters</a>

+         </div>

+         {this.props.children}

+       </div>

+     );

+   }

+ 

+ }

+ 

+ 

+ // vim: set ts=2 sw=2 et:

@@ -0,0 +1,7 @@ 

+ import Widget from './Widget.jsx';

+ 

+ 

+ // Don't use the ES6 "export default" construct:

+ // http://stackoverflow.com/questions/40294870/module-exports-vs-export-default-in-node-js-and-es6

+ 

+ module.exports = {Widget};

@@ -1,53 +0,0 @@ 

- import React from 'react';

- import Panel from '../../components/Panel.jsx';

- 

- 

- class Feed extends React.Component {

- 

-   constructor(props) {

-     super(props);

-     this.state = {

-       matches: this.props.matches,

-       messageLimit: this.props.options.messageLimit,

-       sse: true,

-     };

-     // If it wasn't instantiated with a url or an error happened, bail

-     if (!this.props.url || !this.state.sse) {

-       return;

-     }

-     this.source = (!!window.EventSource) ? new EventSource(this.props.url) : null;

-     if (!this.source) {

-       return;

-     }

-     this.source.addEventListener('error', () => {

-       this.state.sse = false;

-     }, false);

-     window.onbeforeunload = () => {

-       this.source.close();

-     };

-     this.source.onmessage = resp => {

-       const data = JSON.parse(resp.data);

-       if (this.state.matches.length >= this.state.messageLimit) {

-         this.state.matches.pop();

-       }

-       this.state.matches.unshift(data);

-       this.setState({ matches: this.state.matches });

-     };

-   }

- 

-   render() {

-     const feedNodes = this.state.matches.map((match, idx) => {

-       return <Panel match={match} options={this.props.options || {}} key={idx} />;

-     });

-     return (

-       <div>

-         {feedNodes}

-       </div>

-     );

-   }

- 

- }

- 

- // Don't use the ES6 "export default" construct:

- // http://stackoverflow.com/questions/40294870/module-exports-vs-export-default-in-node-js-and-es6

- module.exports = Feed

@@ -0,0 +1,45 @@ 

+ import React from 'react';

+ import ItemsGetter from '../../components/feed/ItemsGetter.jsx';

+ import Feed from '../../components/feed/Feed.jsx';

+ 

+ 

+ export default class Widget extends React.Component {

+ 

+   constructor(props) {

+     super(props);

+     this.state = {

+       items: [],

+       loaded: false,

+     };

+     this.handleServerData = this.handleServerData.bind(this);

+   }

+ 

+   handleServerData(data) {

+     this.setState({

+       items: data,

+       loaded: true,

+     });

+   }

+ 

+   render() {

+     return (

+       <ItemsGetter

+         url={this.props.url}

+         sse={{

+           eventName: "hubs:widget-updated",

+           shouldReload: (idx) => (parseInt(idx) === this.props.widgetIdx),

+         }}

+         handleData={this.handleServerData}

+         >

+         <Feed

+           items={this.state.items}

+           loaded={this.state.loaded}

+           />

+       </ItemsGetter>

+     );

+   }

+ 

+ }

+ 

+ 

+ // vim: set ts=2 sw=2 et:

@@ -9,7 +9,7 @@ 

  const config = {

      entry: {

          Hubs: path.join(PATHS.app, 'core', 'Hubs.js'),

-         Feed: path.join(PATHS.app, 'widgets', 'feed', 'Feed.jsx'),

+         Feed: path.join(PATHS.app, 'widgets', 'feed', 'Feed.js'),

          Halp: path.join(PATHS.app, 'widgets', 'halp', 'Halp.js')

      },

      output: {

file modified
+11
@@ -547,6 +547,17 @@ 

  }

  

  

+ /*

+  * Widget Feed

+  */

+ 

+ .component-feed .media {

+   border-bottom: 1px solid rgba(0, 0, 0, .125);

+ }

+ .component-feed .media:last-child {

+   border: 0

+ }

+ 

  

  /** fedora bootstrap overrides **/

  

file modified
+38
@@ -32,3 +32,41 @@ 

          });

      });

  }

+ 

+ function setup_sse(url) {

+     if (!url || !window.EventSource) {

+       // Auto-update is disabled.

+       return;

+     }

+     var sseSource = new EventSource(url);

+     sseSource.addEventListener("hubs:widget-updated", function(e) {

+         var widget = $("#widget-" + e.data);

+         if (widget.length === 0) { return; }

+         if (widget.find("div.card[data-disable-autoreload]").length !== 0) { return; }

+         $.ajax({

+             url: widget.attr("data-url"),

+             dataType: 'html',

+             success: function(html) {

+                 widget.css("opacity", "0.5");

+                 widget.html(html);

+                 widget.animate({opacity: 1}, 200);

+             },

+             error: function() {

+                 console.log('error updating the widget');

+                 console.trace();

+             },

+         });

+     });

+     // On each message, switch the notification favicon to green for 15s.

+     sseSource.addEventListener("message", function(e) {

+         Notificon('#33ff00');  // green

+         setTimeout(function(){ Notificon(); }, 15000)

+     });

+     // On error, switch the notification favicon to red.

+     sseSource.addEventListener("error", function(e) {

+         Notificon('#eb361e');  // red

+         setTimeout(function(){ Notificon(); }, 15000)

+     });

+     // Make it accessible by other components on the page.

+     document.sseSource = sseSource;

+   }

file removed
-11
@@ -1,11 +0,0 @@ 

- from __future__ import unicode_literals

- 

- import os

- 

- 

- class Stream(object):

-     def get_json(self):

-         here = os.path.dirname(os.path.abspath(__file__))

-         with open(os.path.join(here, 'actions.json')) as fp:

-             actions = fp.read()

-         return actions

file modified
+3 -12
@@ -22,7 +22,7 @@ 

            

    <!-- right side of header -->

    <div class="col-md-{{ hub.config.right_width }} text-center align-self-end">

-     {% if hub.allows(g.auth.user, "config") %}

+     {% if hub.allows(g.user, "config") %}

        <button class="btn btn-sm btn-secondary" data-toggle="modal" data-target="#settings-modal">

          <i class="fa fa-cog" aria-hidden="true"></i>

          hub settings
@@ -323,8 +323,9 @@ 

    visit_counter()

  

    setup_widgets();

+   setup_sse({{ sse_url | tojson }});

    setup_edit_btns();

-   {% if hub.allows(g.auth.user, "config") %}

+   {% if hub.allows(g.user, "config") %}

    setup_settings();

    {% endif %}

  
@@ -332,16 +333,6 @@ 

    make_widget_sortable();

    setup_add_btns();

    {%- endif %}

- 

-   /* This is how to activate and remove (here after 13 sec) the favicon

-   notification */

-   console.log('Show notification');

-   Notificon('#33ff00');

-   setTimeout(function(){console.log('Hide notification'); Notificon()}, 13000)

-   setTimeout(function(){

-       console.log('Show notification'); Notificon('#eb361e')

-     }, 15000)

-   setTimeout(function(){console.log('Hide notification'); Notificon()}, 18000)

  });

  

  </script>

file modified
+5 -4
@@ -112,19 +112,20 @@ 

        <ul class="nav nav-pills flex-lg-column mb-3 rounded" id="vertical-navbar" role="navigation">

          {% if g.auth.logged_in %}

          <li class="nav-item">

-           <a class="nav-link {% if request.path.endswith('/' + g.auth.user.username + '/') %}active{% endif %}"

-              href="{{ url_for("hub", name=g.auth.user.username) }}">

+           <a class="nav-link {% if request.path.endswith('/' + g.user.username + '/') %}active{% endif %}"

+              href="{{ url_for("hub", name=g.user.username) }}">

              <span><i class="fa fa-home" aria-hidden="true"></i></span>

              My Hub

            </a>

          </li>

          <li class="nav-item">

-           <a class="nav-link" href="{{ url_for("stream", name=g.auth.user.username) }}">

+           <a class="nav-link {% if request.path.endswith(url_for("stream")) %}active{% endif %}"

+              href="{{ url_for("stream") }}">

              <span><i class="fa fa-home" aria-hidden="true"></i></span>

              My Stream

            </a>

          </li>

-         {% for hub in g.auth.user.bookmarks %}

+         {% for hub in g.user.bookmarks %}

            <li class='nav-item idle-{{hub.activity_class}}{% if request.path.endswith('/' + hub.name + '/') %} active{% endif %}'>

              <a class="nav-link" href="{{ url_for("hub", name=hub.name) }}">

                <span><i class="fa fa-bookmark" aria-hidden="true"></i></span>

file modified
+10 -88
@@ -13,43 +13,7 @@ 

  

  <div class="row">

    <div class="col-sm-8">

- 

-     <ul class="nav-tabs " role="tablist">

-       <li class="nav-item">

-         <a class="nav-link active" href="#stream" role="tab" data-toggle="tab">My Stream</a>

-       </li>

-       <li class="nav-item">

-         <a class="nav-link" href="#actions" role="tab" data-toggle="tab">My Actions</a>

-       </li>

-       <li class="nav-item">

-         <a class="nav-link" href="#mentions" role="tab" data-toggle="tab">My Mentions</a>

-       </li>

-       <li class="nav-item">

-         <a class="nav-link" href="#saved" role="tab" data-toggle="tab">Saved Notifications</a>

-       </li>

-     </ul>

- 

-     <!-- Tab panes -->

-     <div class="tab-content">

-       <div role="tabpanel" class="tab-pane fade in active" id="stream">

-         {% include "includes/_searchstream.html" %}

-         <div id="streamFeed"></div>

-       </div>

-       <div role="tabpanel" class="tab-pane fade" id="actions">

-         {% include "includes/_searchstream.html" %}

-         <div id="actionsFeed"></div>

-       </div>

-       <div role="tabpanel" class="tab-pane fade" id="mentions">

-         {% include "includes/_searchstream.html" %}

-         <br/>

-         <div id="mentionsFeed">

-           {% include "includes/_messages.html" %}

-         </div>

-       </div>

-       <div role="tabpanel" class="tab-pane fade" id="saved">

-         {% include "includes/_searchstream.html" %}

-         <div id="savedFeed"></div>

-       </div>

+     <div id="streams">

      </div>

    </div>

  
@@ -63,59 +27,17 @@ 

  

  {% block jscripts %}

  {{ super() }}

- <script type="text/javascript"

-   src="{{url_for('static', filename='js/build/Feed.js')}}"></script>

  <script>

-   function setup_feeds(actions, saved, saveUrl) {

-     var streamFeed = React.createElement(Feed, {

-       matches: actions,

-       options: {

-         saveUrl: saveUrl,

-         messageLimit: 100

-       }

-     });

-     /* Right now, stream and actions are the same.

-      * Once, Mentions is implemented, then each will be its own

-      */

-     ReactDOM.render(streamFeed, document.getElementById('streamFeed'));

-     ReactDOM.render(streamFeed, document.getElementById('actionsFeed'));

-   

-     var savedFeed = React.createElement(Feed, {

-       matches: saved,

-       url: false,

-       options: {

-         messageLimit: 100,

-         delete: true,

-         saveUrl: saveUrl

-       }

-     });

-     ReactDOM.render(savedFeed, document.getElementById('savedFeed'));

-     $('a[href="#saved"]').on('shown.bs.tab', function(){

-       $.get(saveUrl).done(function(resp) {

-         if (resp.length != saved.length) {

-           ReactDOM.unmountComponentAtNode(document.getElementById('savedFeed'));

-           var savedFeed = React.createElement(Feed, {

-             matches: resp,

-             url: false,

-             options: {

-               messageLimit: 100,

-               delete: true,

-               saveUrl: saveUrl

-             }

-           });

-           ReactDOM.render(savedFeed, document.getElementById('savedFeed'));

-         }

-       });

-     });

-   }

- 

   $(function() {

-    setup_widgets();

-    setup_feeds(

-        {{ actions|safe }},

-        {{ saved|safe }},

-        {{ url_for("notifications", user=hub.name)|tojson }}

-    );

+     setup_sse({{ sse_url | tojson }});

+     const StreamsElement = React.createElement(Hubs.Streams, {

+         username: {{ g.auth.nickname | tojson }},

+         urls: {

+             saved: {{ url_for("saved_notifs") | tojson }},

+             content: {{ url_for("stream_existing") | tojson }},

+         },

+     });

+     ReactDOM.render(StreamsElement, document.getElementById('streams'));

   });

  </script>

  {% endblock %}

file modified
+20 -1
@@ -138,7 +138,10 @@ 

          g.auth = auth

          g.oidc_id_token = None

          if not auth:

-             g.auth = munch.Munch(logged_in=False, user=None)

+             g.auth = munch.Munch(logged_in=False)

+             g.user = None

+         else:

+             g.user = auth.user

  

      if flask._app_ctx_stack.top is None:

          # App context isn't pushed yet
@@ -202,3 +205,19 @@ 

          yield recorded

      finally:

          template_rendered.disconnect(record, app)

+ 

+ 

+ @contextmanager

+ def app_config(app, config):

+     """Context manager to change the application's configuration

+ 

+     Args:

+         app: The application.

+         config: The configuration to set temporarily.

+     """

+     original_config = app.config.__dict__

+     app.config.update(config)

+     try:

+         yield

+     finally:

+         app.config.__dict__ = original_config

file modified
+7 -1
@@ -1,7 +1,8 @@ 

  ### Secret key for the Flask application

- SECRET_KEY='<The web application secret key>'

+ SECRET_KEY = '<The web application secret key>'

  

  ### url to the database server:

+ 

  import os

  DB_URL = 'sqlite:///%s/test.db' % (os.path.dirname(os.path.abspath(__file__)))

  #DB_URL='sqlite:////tmp/fedocal_dev.sqlite'
@@ -11,3 +12,8 @@ 

      os.path.abspath(__file__)), 'client_secrets.json')

  

  SITE_ADMINS = ["admin"]

+ 

+ SSE_URL = {

+     "port": "8080",

+     "path": "/sse",

+ }

@@ -0,0 +1,145 @@ 

+ from __future__ import unicode_literals

+ 

+ import json

+ 

+ from mock import Mock, patch

+ 

+ from hubs.app import app

+ from hubs.models import User, Hub, Association

+ from hubs.feed import (

+     Notifications, Activity, format_msg, get_hubs_for_msg,

+     on_new_message, on_new_notification)

+ from hubs.tests import APPTest

+ 

+ 

+ class FeedTest(APPTest):

+ 

+     def setUp(self):

+         super(FeedTest, self).setUp()

+         self.feed_classes = [

+             (Notifications, "notif"),

+             (Activity, "activity"),

+             ]

+ 

+     def tearDown(self):

+         super(FeedTest, self).tearDown()

+ 

+     def test_get(self):

+         for feed_class, msgtype in self.feed_classes:

+             feed = feed_class("testuser")

+             feed.db = Mock()

+             feed.db.lrange.return_value = []

+             feed.get()

+             feed.db.lrange.assert_called_once_with(

+                 "feed|{}|testuser".format(msgtype), 0, -1)

+ 

+     def test_add(self):

+         msg = {

+             "msg_id": "testid",

+             "topic": "test.topic.dummy.value",

+             "timestamp": 123456789,

+             "testkey": "testvalue",

+             }

+         for feed_class, msgtype in self.feed_classes:

+             feed = feed_class("testuser")

+             feed.db = Mock()

+             key = "feed|{}|testuser".format(msgtype)

+             feed.add(msg)

+             feed.db.lpush.assert_called_once()

+             lpush_call_args = feed.db.lpush.call_args_list[0][0]

+             self.assertEqual(lpush_call_args[0], key)

+             if feed_class == Notifications:

+                 self.assertIn("msg_ids", json.loads(lpush_call_args[1]))

+             elif feed_class == Activity:

+                 self.assertEqual(json.loads(lpush_call_args[1]), msg)

+             feed.db.ltrim.assert_called_once_with(key, 0, 100)

+ 

+     def test_length(self):

+         for feed_class, msgtype in self.feed_classes:

+             feed = feed_class("testuser")

+             feed.db = Mock()

+             feed.db.llen.return_value = 42

+             self.assertEqual(feed.length(), 42)

+             feed.db.llen.assert_called_once_with(

+                 "feed|{}|testuser".format(msgtype))

+ 

+     def test_close(self):

+         for feed_class, msgtype in self.feed_classes:

+             feed = feed_class("testuser")

+             redis_mock = feed.db = Mock()

+             feed.close()

+             redis_mock.close.assert_called_once()

+             self.assertIsNone(feed.db)

+             self.assertIsNone(feed.connection_pool)

+ 

+     @patch("hubs.feed.Activity")

+     def test_on_new_message(self, mock_activity):

+         msg = {"_hubs": ["testhub1", "testhub2"]}

+         feed = Mock()

+         mock_activity.return_value = feed

+         on_new_message(msg)

+         call_args = [c[0] for c in mock_activity.call_args_list]

+         self.assertEqual(call_args, [("testhub1", ), ("testhub2", )])

+         self.assertEqual(feed.add.call_count, 2)

+         feed.add.assert_called_with(msg)

+ 

+     @patch("hubs.feed.Notifications")

+     def test_on_new_notification(self, mock_notifications):

+         feed = Mock()

+         mock_notifications.return_value = feed

+         msg = {"msg_id": "testmsg"}

+         with patch("hubs.feed.fedmsg.meta.msg2agent") as msg2agent:

+             msg2agent.return_value = "ralph"

+             on_new_notification(msg)

+         msg2agent.assert_called()

+         mock_notifications.assert_called_with("ralph")

+         feed.add.assert_called_with(msg)

+ 

+     @patch("hubs.feed.Notifications")

+     def test_on_new_notification_unknown_user(self, mock_notifications):

+         with patch("hubs.feed.fedmsg.meta.msg2agent") as msg2agent:

+             msg2agent.return_value = "unknown_user"

+             on_new_notification({"msg_id": "testmsg"})

+         msg2agent.assert_called()

+         # User does not exist, no feed instance should have been created.

+         mock_notifications.assert_not_called()

+ 

+     @patch("hubs.feed.fedmsg.meta.msg2usernames")

+     def test_get_hubs_for_msg(self, msg2usernames):

+         ralph = User.query.get("ralph")

+         infra = Hub.query.get("infra")

+         test_hub = Hub(name="testhub")

+         self.session.add(test_hub)

+         self.session.add(Association(hub=test_hub, user=ralph, role="member"))

+         self.session.add(Association(hub=infra, user=ralph, role="owner"))

+         msg2usernames.return_value = ["unknown_user", "ralph"]

+         self.assertListEqual(

+             sorted(get_hubs_for_msg({"msg_id": "testmsg"})),

+             ["infra", "ralph", "testhub"]

+             )

+ 

+     def test_format_msg(self):

+         msg = {

+             "msg_ids": {

+                 "testid1": {"msg_id": "testid1"},

+                 "testid2": {"msg_id": "testid2"},

+                 "testid3": {"msg_id": "testid3"},

+             },

+             "usernames": ["ralph", "decause"],

+             "subtitle": "ralph's ticket was commented by decause",

+             "subjective": "your ticket was commented by decause",

+         }

+         with app.test_request_context():

+             result = format_msg(msg)

+         self.assertEqual(

+             result["dom_id"],

+             "067306d0b091ec48d9e2dded1ca9b1f8b1b2ac93")

+         self.assertEqual(

+             result["markup"],

+             """<a href="/ralph/">ralph</a>'s ticket was commented by """

+             """<a href="/decause/">decause</a>"""

+             )

+         self.assertEqual(

+             result["markup_subjective"],

+             """your ticket was commented by <a href="/decause/">decause</a>"""

+             )

@@ -1,6 +1,7 @@ 

  from __future__ import unicode_literals

  

- from hubs.widgets.caching import cache, CachedFunction

+ from hubs.utils.cache import cache

+ from hubs.widgets.caching import CachedFunction

  

  from mock import Mock

  from hubs.tests import APPTest

empty or binary file added
@@ -0,0 +1,50 @@ 

+ from __future__ import unicode_literals

+ 

+ import dogpile

+ import dogpile.cache

+ from mock import Mock, patch

+ 

+ from hubs.tests import APPTest

+ from hubs.utils.cache import cache

+ from hubs.utils.pkgdb import get_owned_packages

+ 

+ 

+ class PkgdbUtilsTest(APPTest):

+ 

+     def setUp(self):

+         super(PkgdbUtilsTest, self).setUp()

+         # Use a memory backend, not the default null backend, or we can't test

+         # the caching feature.

+         cache.configure(backend='dogpile.cache.memory',

+                         replace_existing_backend=True)

+         cache.delete("owned_packages:abompard")

+ 

+     def _is_cached(self):

+         result = cache.get("owned_packages:abompard", ignore_expiration=True)

+         return not isinstance(result, dogpile.cache.api.NoValue)

+ 

+     def test_get_owned_packages(self):

+         pkgs = get_owned_packages("abompard", use_cache=False)

+         self.assertEqual(len(pkgs), 42)  # not kidding! :-)

+         for pkg in pkgs:

+             self.assertEqual(type(pkg), type(""))

+         self.assertFalse(self._is_cached())

+ 

+     def test_get_owned_packages_invalid(self):

+         pkgs = get_owned_packages("abompard", use_cache=False)

+         self.assertListEqual(pkgs, [])

+ 

+     @patch("hubs.utils.pkgdb.requests")

+     def test_get_owned_packages_cache(self, requests):

+         response = Mock()

+         response.json.return_value = {

+             "point of contact": [],

+             "co-maintained": [],

+         }

+         requests.get.return_value = response

+         get_owned_packages("abompard", use_cache=True)

+         self.assertTrue(self._is_cached())

+         self.assertEqual(requests.get.call_count, 1)

+         get_owned_packages("abompard", use_cache=True)

+         self.assertTrue(self._is_cached())

+         self.assertEqual(requests.get.call_count, 1)

hubs/tests/utils/test_views.py hubs/tests/test_view_utils.py
file renamed
+47 -2
@@ -1,9 +1,11 @@ 

  from __future__ import unicode_literals

  

+ import flask

+ 

  import hubs.models

  from hubs.app import app

- from hubs.tests import APPTest, FakeAuthorization, auth_set

- from hubs.utils.views import get_visible_widgets

+ from hubs.tests import APPTest, FakeAuthorization, auth_set, app_config

+ from hubs.utils.views import get_visible_widgets, get_sse_url

  

  

  class ViewUtilsTest(APPTest):
@@ -84,3 +86,46 @@ 

              widgets = get_visible_widgets(hub)

          self.assertNotEqual(len(widgets["left"]), 0)

          self.assertNotEqual(len(widgets["right"]), 0)

+ 

+     def test_sse_url(self):

+         with app.test_request_context():

+             self.assertEqual(get_sse_url(""), "http://localhost:8080/sse/")

+ 

+         with app.test_request_context():

+             self.assertEqual(

+                 get_sse_url("hub/ralph"),

+                 "http://localhost:8080/sse/hub/ralph")

+ 

+         with app.test_request_context(base_url='http://hubs.example.com/'):

+             self.assertEqual(

+                 get_sse_url("hub/ralph"),

+                 "http://hubs.example.com:8080/sse/hub/ralph")

+ 

+         with app.test_request_context(base_url='http://example.com/hubs'):

+             self.assertEqual(flask.url_for("index"), "/hubs/")  # test validity

+             self.assertEqual(

+                 get_sse_url("hub/ralph"),

+                 "http://example.com:8080/sse/hub/ralph")

+ 

+         with app.test_request_context(base_url='http://hubs.example.com/'):

+             with app_config(app, {

+                     "SSE_URL": {"host": "hubs-sse.example.com"}

+                     }):

+                 self.assertEqual(

+                     get_sse_url("hub/ralph"),

+                     "http://hubs-sse.example.com/hub/ralph")

+ 

+         with app.test_request_context(base_url='http://hubs.example.com/'):

+             with app_config(app, {

+                     "SSE_URL": {"host": "hubs-sse.example.com",

+                                 "port": 8080}

+                     }):

+                 self.assertEqual(

+                     get_sse_url("hub/ralph"),

+                     "http://hubs-sse.example.com:8080/hub/ralph")

+ 

+         with app.test_request_context(base_url='http://localhost/hubs'):

+             with app_config(app, {"SSE_URL": {"host": "example.com"}}):

+                 self.assertEqual(

+                     get_sse_url("hub/ralph"),

+                     "http://example.com/hub/ralph")

@@ -0,0 +1,451 @@ 

+ interactions:

+ - request:

+     body: null

+     headers:

+       Accept: ['*/*']

+       Accept-Encoding: ['gzip, deflate']

+       Connection: [keep-alive]

+       User-Agent: [python-requests/2.13.0]

+     method: GET

+     uri: https://admin.fedoraproject.org/pkgdb/api/packager/package/abompard

+   response:

+     body: {string: "{\n  \"co-maintained\": [\n    {\n      \"acls\": [],\n      \"\

+         creation_date\": 1430239258.0,\n      \"description\": \"contextlib2 is a\

+         \ backport of the standard library's contextlib module to\\nearlier Python\

+         \ versions.\\n\\nIt also serves as a real world proving ground for possible\

+         \ future\\nenhancements to the standard library version.\",\n      \"koschei_monitor\"\

+         : false,\n      \"monitor\": true,\n      \"name\": \"python-contextlib2\"\

+         ,\n      \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1210978\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Backports and enhancements\

+         \ for the contextlib module\",\n      \"upstream_url\": \"https://pypi.io/project/contextlib2\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"This is a lib package for use by pytest-cov, nose-cov\

+         \ and nose2-cov. If\\nyou're developing a coverage plugin for a test framework\

+         \ then you probably\\nwant one of those.\",\n      \"koschei_monitor\": false,\n\

+         \      \"monitor\": false,\n      \"name\": \"python-cov-core\",\n      \"\

+         namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\": \"\

+         Approved\",\n      \"summary\": \"Plugin core for use by pytest-cov, nose-cov\

+         \ and nose2-cov\",\n      \"upstream_url\": \"http://bitbucket.org/memedough/cov-core/overview\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"Coverage.py is a Python module that measures code\

+         \ coverage during Python\\nexecution. It uses the code analysis tools and\

+         \ tracing hooks provided in the\\nPython standard library to determine which\

+         \ lines are executable, and which\\nhave been executed.\",\n      \"koschei_monitor\"\

+         : false,\n      \"monitor\": true,\n      \"name\": \"python-coverage\",\n\

+         \      \"namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\"\

+         : \"Approved\",\n      \"summary\": \"Code coverage testing module for Python\"\

+         ,\n      \"upstream_url\": \"http://nedbatchelder.com/code/modules/coverage.html\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1437659169.0,\n\

+         \      \"description\": \"DDT (Data-Driven Tests) allows you to multiply one\

+         \ test case by running it with\\ndifferent test data, and make it appear as\

+         \ multiple test cases. It is used in\\ncombination with other testing frameworks\

+         \ like unittest and nose.\",\n      \"koschei_monitor\": true,\n      \"monitor\"\

+         : true,\n      \"name\": \"python-ddt\",\n      \"namespace\": \"rpms\",\n\

+         \      \"review_url\": \"https://bugzilla.redhat.com/1244014\",\n      \"\

+         status\": \"Approved\",\n      \"summary\": \"Python library to multiply test\

+         \ cases\",\n      \"upstream_url\": \"https://github.com/txels/ddt\"\n   \

+         \ },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"Falcon is a high-performance Python framework for\

+         \ building cloud APIs.\\nIt encourages the REST architectural style, and tries\

+         \ to do as little as\\npossible while remaining highly effective.\",\n   \

+         \   \"koschei_monitor\": false,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-falcon\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : null,\n      \"status\": \"Approved\",\n      \"summary\": \"An unladen\

+         \ web framework for building APIs and app backends\",\n      \"upstream_url\"\

+         : \"https://falconframework.org\"\n    },\n    {\n      \"acls\": [],\n  \

+         \    \"creation_date\": 1400070978.0,\n      \"description\": \"A comprehensive\

+         \ HTTP client library that supports many features left out of\\nother HTTP\

+         \ libraries.\",\n      \"koschei_monitor\": false,\n      \"monitor\": true,\n\

+         \      \"name\": \"python-httplib2\",\n      \"namespace\": \"rpms\",\n  \

+         \    \"review_url\": null,\n      \"status\": \"Approved\",\n      \"summary\"\

+         : \"A comprehensive HTTP client library\",\n      \"upstream_url\": \"https://pypi.python.org/pypi/httplib2\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"This module provides basic functions for parsing\

+         \ mime-type names\\nand matching them against a list of media-ranges.\",\n\

+         \      \"koschei_monitor\": false,\n      \"monitor\": false,\n      \"name\"\

+         : \"python-mimeparse\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : null,\n      \"status\": \"Approved\",\n      \"summary\": \"Python module\

+         \ for parsing mime-type names\",\n      \"upstream_url\": \"https://github.com/dbtsai/python-mimeparse\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"Passlib is a password hashing library for Python\

+         \ 2 & 3, which provides\\ncross-platform implementations of over 20 password\

+         \ hashing algorithms,\\nas well as a framework for managing existing password\

+         \ hashes. It's\\ndesigned to be useful for a wide range of tasks, from verifying\

+         \ a hash\\nfound in /etc/shadow, to providing full-strength password hashing\

+         \ for\\nmulti-user application.\",\n      \"koschei_monitor\": false,\n  \

+         \    \"monitor\": true,\n      \"name\": \"python-passlib\",\n      \"namespace\"\

+         : \"rpms\",\n      \"review_url\": null,\n      \"status\": \"Approved\",\n\

+         \      \"summary\": \"Comprehensive password hashing framework supporting\

+         \ over 20 schemes\",\n      \"upstream_url\": \"https://bitbucket.org/ecollins/passlib\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1462278195.0,\n\

+         \      \"description\": \"A fork of SocksiPy with bug fixes and extra features.\\\

+         n\\nActs as a drop-in replacement to the socket module. Featuring:\\n\\n-\

+         \ SOCKS proxy client for Python 2.6 - 3.x\\n- TCP and UDP both supported\\\

+         n- HTTP proxy client included but not supported or recommended (you should\

+         \ use\\n  urllib2's or requests' own HTTP proxy interface)\\n- urllib2 handler\

+         \ included.\",\n      \"koschei_monitor\": true,\n      \"monitor\": \"nobuild\"\

+         ,\n      \"name\": \"python-pysocks\",\n      \"namespace\": \"rpms\",\n \

+         \     \"review_url\": \"https://bugzilla.redhat.com/1332206\",\n      \"status\"\

+         : \"Approved\",\n      \"summary\": \"A Python SOCKS client module\",\n  \

+         \    \"upstream_url\": \"https://github.com/Anorov/PySocks\"\n    },\n   \

+         \ {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n      \"\

+         description\": \"testtools is a set of extensions to the Python standard library's\

+         \ unit testing\\nframework.\",\n      \"koschei_monitor\": false,\n      \"\

+         monitor\": true,\n      \"name\": \"python-testtools\",\n      \"namespace\"\

+         : \"rpms\",\n      \"review_url\": null,\n      \"status\": \"Approved\",\n\

+         \      \"summary\": \"Extensions to the Python unit testing framework\",\n\

+         \      \"upstream_url\": \"https://launchpad.net/testtools\"\n    },\n   \

+         \ {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n      \"\

+         description\": \"This package represents the core of the Zope Component Architecture.\\\

+         nTogether with the 'zope.interface' package, it provides facilities for\\\

+         ndefining, registering and looking up components.\",\n      \"koschei_monitor\"\

+         : false,\n      \"monitor\": true,\n      \"name\": \"python-zope-component\"\

+         ,\n      \"namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\"\

+         : \"Approved\",\n      \"summary\": \"Zope Component Architecture\",\n   \

+         \   \"upstream_url\": \"https://pypi.io/project/zope.component\"\n    },\n\

+         \    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n    \

+         \  \"description\": \"The zope configuration system provides an extensible\

+         \ system for supporting\\nvarious kinds of configurations.\\n\\nIt is based\

+         \ on the idea of configuration directives. Users of the configuration\\nsystem\

+         \ provide configuration directives in some language that express\\nconfiguration\

+         \ choices. The intent is that the language be pluggable. An XML\\nlanguage\

+         \ is provided by default.\",\n      \"koschei_monitor\": true,\n      \"monitor\"\

+         : true,\n      \"name\": \"python-zope-configuration\",\n      \"namespace\"\

+         : \"rpms\",\n      \"review_url\": null,\n      \"status\": \"Approved\",\n\

+         \      \"summary\": \"Zope Configuration Markup Language (ZCML)\",\n     \

+         \ \"upstream_url\": \"https://github.com/zopefoundation/zope.configuration\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"The zope.event package provides a simple event system.\

+         \ It provides\\nan event publishing system and a very simple event-dispatching\

+         \ system\\non which more sophisticated event dispatching systems can be built.\\\

+         n(For example, a type-based event dispatching system that builds on\\nzope.event\

+         \ can be found in zope.component.)\",\n      \"koschei_monitor\": false,\n\

+         \      \"monitor\": true,\n      \"name\": \"python-zope-event\",\n      \"\

+         namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\": \"\

+         Approved\",\n      \"summary\": \"Zope Event Publication\",\n      \"upstream_url\"\

+         : \"http://pypi.python.org/pypi/zope.event/\"\n    },\n    {\n      \"acls\"\

+         : [],\n      \"creation_date\": 1400070978.0,\n      \"description\": \"This\

+         \ package contains exception interfaces and implementations which are so\\\

+         ngeneral purpose that they don't belong in Zope application-specific packages.\"\

+         ,\n      \"koschei_monitor\": false,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-zope-exceptions\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : null,\n      \"status\": \"Approved\",\n      \"summary\": \"Zope Exceptions\"\

+         ,\n      \"upstream_url\": \"http://pypi.python.org/pypi/zope.exceptions\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"This module provides message identifiers for internationalization.\"\

+         ,\n      \"koschei_monitor\": false,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-zope-i18nmessageid\",\n      \"namespace\": \"rpms\",\n      \"\

+         review_url\": null,\n      \"status\": \"Approved\",\n      \"summary\": \"\

+         Message Identifiers for internationalization\",\n      \"upstream_url\": \"\

+         http://pypi.python.org/pypi/zope.i18nmessageid\"\n    },\n    {\n      \"\

+         acls\": [],\n      \"creation_date\": 1400070978.0,\n      \"description\"\

+         : \"This package is a zope.interface extension for defining data schemas.\"\

+         ,\n      \"koschei_monitor\": false,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-zope-schema\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : null,\n      \"status\": \"Approved\",\n      \"summary\": \"Zope 3 schemas\"\

+         ,\n      \"upstream_url\": \"http://pypi.python.org/pypi/zope.schema\"\n \

+         \   },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"This package provides a number of testing frameworks.\

+         \ It includes a\\nflexible test runner, and supports both doctest and unittest.\"\

+         ,\n      \"koschei_monitor\": false,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-zope-testing\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : null,\n      \"status\": \"Approved\",\n      \"summary\": \"Zope Testing\

+         \ Framework\",\n      \"upstream_url\": \"https://pypi.io/project/zope.testing\"\

+         \n    }\n  ],\n  \"output\": \"ok\",\n  \"point of contact\": [\n    {\n \

+         \     \"acls\": [],\n      \"creation_date\": 1400070978.0,\n      \"description\"\

+         : \"Grisbi is a very functional personal financial management program\\nwith\

+         \ a lot of features: checking, cash and liabilities accounts,\\nseveral accounts\

+         \ with automatic contra entries, several currencies,\\nincluding euro, arbitrary\

+         \ currency for every operation, money\\ninterchange fees, switch to euro account\

+         \ per account, description\\nof the transactions with third parties, categories,\

+         \ sub-categories,\\nfinancial year, notes, breakdown, transfers between accounts,\

+         \ even\\nfor accounts of different currencies, bank reconciliation, scheduled\\\

+         ntransactions, automatic recall of last transaction for every third\\nparty,\

+         \ nice and easy user interface, user manual, QIF import/export.\",\n     \

+         \ \"koschei_monitor\": false,\n      \"monitor\": false,\n      \"name\":\

+         \ \"grisbi\",\n      \"namespace\": \"rpms\",\n      \"review_url\": null,\n\

+         \      \"status\": \"Approved\",\n      \"summary\": \"Personal finances manager\"\

+         ,\n      \"upstream_url\": \"http://www.grisbi.org\"\n    },\n    {\n    \

+         \  \"acls\": [],\n      \"creation_date\": 1400070978.0,\n      \"description\"\

+         : \"KeePassX is an application for people with extremly high demands on secure\\\

+         npersonal data management.\\nKeePassX saves many different information e.g.\

+         \ user names, passwords, urls,\\nattachemts and comments in one single database.\

+         \ For a better management\\nuser-defined titles and icons can be specified\

+         \ for each single entry.\\nFurthermore the entries are sorted in groups, which\

+         \ are customizable as well.\\nThe integrated search function allows to search\

+         \ in a single group or the\\ncomplete database.\\nKeePassX offers a little\

+         \ utility for secure password generation. The password\\ngenerator is very\

+         \ customizable, fast and easy to use. Especially someone who\\ngenerates passwords\

+         \ frequently will appreciate this feature.\\nThe complete database is always\

+         \ encrypted either with AES (alias Rijndael) or\\nTwofish encryption algorithm\

+         \ using a 256 bit key. Therefore the saved\\ninformation can be considered\

+         \ as quite safe. KeePassX uses a database format\\nthat is compatible with\

+         \ KeePass Password Safe v2 for MS Windows.\",\n      \"koschei_monitor\":\

+         \ false,\n      \"monitor\": false,\n      \"name\": \"keepassx\",\n     \

+         \ \"namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\"\

+         : \"Approved\",\n      \"summary\": \"Cross-platform password manager\",\n\

+         \      \"upstream_url\": \"http://www.keepassx.org/\"\n    },\n    {\n   \

+         \   \"acls\": [],\n      \"creation_date\": 1400070978.0,\n      \"description\"\

+         : \"A ssh-add helper that uses kwallet and kpassworddialog.\",\n      \"koschei_monitor\"\

+         : false,\n      \"monitor\": true,\n      \"name\": \"ksshaskpass\",\n   \

+         \   \"namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\"\

+         : \"Approved\",\n      \"summary\": \"A ssh-add helper that uses kwallet and\

+         \ kpassworddialog\",\n      \"upstream_url\": \"https://cgit.kde.org/ksshaskpass.git\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1474548104.0,\n\

+         \      \"description\": \"Libsass is a C/C++ port of the Sass CSS precompiler.\

+         \ The original version was\\nwritten in Ruby, but this version is meant for\

+         \ efficiency and portability.\\n\\nThis library strives to be light, simple,\

+         \ and easy to build and integrate with\\na variety of platforms and languages.\\\

+         n\\nLibsass is just a library, but if you want to RUN libsass, install the\

+         \ sassc\\npackage.\",\n      \"koschei_monitor\": true,\n      \"monitor\"\

+         : true,\n      \"name\": \"libsass\",\n      \"namespace\": \"rpms\",\n  \

+         \    \"review_url\": \"https://bugzilla.redhat.com/1369534\",\n      \"status\"\

+         : \"Approved\",\n      \"summary\": \"C/C++ port of the Sass CSS precompiler\"\

+         ,\n      \"upstream_url\": \"http://sass-lang.com/libsass\"\n    },\n    {\n\

+         \      \"acls\": [],\n      \"creation_date\": 1496666386.0,\n      \"description\"\

+         : \"This is a server for SMTP and related protocols, similar in utility\\\

+         r\\nto the standard library\\u2019s smtpd.py module, but rewritten to be based\\\

+         r\\non asyncio for Python 3.\",\n      \"koschei_monitor\": true,\n      \"\

+         monitor\": true,\n      \"name\": \"python-aiosmtpd\",\n      \"namespace\"\

+         : \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1404883\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Asyncio-based SMTP\

+         \ server\",\n      \"upstream_url\": \"https://github.com/aio-libs/aiosmtpd\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1496666324.0,\n\

+         \      \"description\": \"This is a very simple decorator and function which\

+         \ populates a \\r\\nmodule's __all__ and optionally the module globals. \\\

+         r\\nThis provides both a pure-Python implementation and a C implementation.\

+         \ \\r\\nIt is proposed that the C implementation be added to built-ins for\

+         \ \\r\\nPython 3.6.\",\n      \"koschei_monitor\": true,\n      \"monitor\"\

+         : true,\n      \"name\": \"python-atpublic\",\n      \"namespace\": \"rpms\"\

+         ,\n      \"review_url\": \"https://bugzilla.redhat.com/1404882\",\n      \"\

+         status\": \"Approved\",\n      \"summary\": \"Decorator for populating a Python\

+         \ module's __all__\",\n      \"upstream_url\": \"http://public.readthedocs.io\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1496666808.0,\n\

+         \      \"description\": \"Blessed is a thin, practical wrapper around terminal\

+         \ styling, screen\\r\\npositioning, and keyboard input.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python-blessed\",\n \

+         \     \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1435986\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"A thin, practical\

+         \ wrapper around terminal capabilities in Python\",\n      \"upstream_url\"\

+         : \"https://pypi.python.org/pypi/blessed\"\n    },\n    {\n      \"acls\"\

+         : [],\n      \"creation_date\": 1476188786.0,\n      \"description\": \"The\

+         \ flufl.bounce library provides a set of heuristics and an API for\\ndetecting\

+         \ the original bouncing email addresses from a bounce message.  Many\\nformats\

+         \ found in the wild are supported, as are VERP and RFC 3464 (DSN).\",\n  \

+         \    \"koschei_monitor\": true,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-flufl-bounce\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : \"https://bugzilla.redhat.com/1375999\",\n      \"status\": \"Approved\"\

+         ,\n      \"summary\": \"Email bounce detectors\",\n      \"upstream_url\"\

+         : \"https://gitlab.com/warsaw/flufl.bounce\"\n    },\n    {\n      \"acls\"\

+         : [],\n      \"creation_date\": 1476188935.0,\n      \"description\": \"The\

+         \ ``flufl.i18n`` library provides a convenient API for managing translation\\\

+         ncontexts in Python applications. It provides facilities not only for\\nsingle-context\

+         \ applications like command line scripts, but also more\\nsophisticated management\

+         \ of multiple-context applications such as Internet\\nservers.\",\n      \"\

+         koschei_monitor\": true,\n      \"monitor\": true,\n      \"name\": \"python-flufl-i18n\"\

+         ,\n      \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1375993\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"A high level API\

+         \ for Python internationalization\",\n      \"upstream_url\": \"https://gitlab.com/warsaw/flufl.i18n\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1476188827.0,\n\

+         \      \"description\": \"The flufl.lock library provides an NFS-safe file-based\

+         \ locking algorithm\\ninfluenced by the GNU/Linux \\\"open(2)\\\" man page,\

+         \ under the description of\\nthe \\\"O_EXCL\\\" option.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python-flufl-lock\",\n\

+         \      \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1376001\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"NFS-safe file locking\

+         \ with timeouts for POSIX systems\",\n      \"upstream_url\": \"https://gitlab.com/warsaw/flufl.lock\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1481720827.0,\n\

+         \      \"description\": \"This package contains a small collection of test\

+         \ helpers that Barry Warsaw\\nuses in almost all his packages. Specifically,\

+         \ plugins for the following\\ntest tools are provided:\\n- nose2\\n- flake8\\\

+         nPython 3.4 is the minimum supported version.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python-flufl-testing\"\

+         ,\n      \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1401582\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Small collection\

+         \ of test tool plugins\",\n      \"upstream_url\": \"https://gitlab.com/warsaw/flufl.testing\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1481720722.0,\n\

+         \      \"description\": \"The LAZR config system is typically used to manage\

+         \ process configuration.\\nProcess configuration is for saying how things\

+         \ change when we run systems on\\ndifferent machines, or under different circumstances.\\\

+         n\\nThis system uses ini-like file format of section, keys, and values. The\

+         \ config\\nfile supports inheritance to minimize duplication of information\

+         \ across files.\\nThe format supports schema validation.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python-lazr-config\"\

+         ,\n      \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1387518\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Create configuration\

+         \ schemas, and process and validate configurations\",\n      \"upstream_url\"\

+         : \"https://launchpad.net/lazr.config\"\n    },\n    {\n      \"acls\": [],\n\

+         \      \"creation_date\": 1477410675.0,\n      \"description\": \"The lazr.delegates\

+         \ package makes it easy to write objects that delegate\\nbehavior to another\

+         \ object. The new object adds some property or behavior on\\nto the other\

+         \ object, while still providing the underlying interface, and\\ndelegating\

+         \ behavior.\",\n      \"koschei_monitor\": true,\n      \"monitor\": true,\n\

+         \      \"name\": \"python-lazr-delegates\",\n      \"namespace\": \"rpms\"\

+         ,\n      \"review_url\": \"https://bugzilla.redhat.com/1387291\",\n      \"\

+         status\": \"Approved\",\n      \"summary\": \"Easily write objects that delegate\

+         \ behavior\",\n      \"upstream_url\": \"https://launchpad.net/lazr.delegates\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1479219121.0,\n\

+         \      \"description\": \"This is LAZR smtptest, a framework for testing SMTP-based\

+         \ applications and\\nlibraries. It provides a real, live SMTP server that\

+         \ you can send messages\\nto, and from which you can read those test messages.\

+         \ This can be used to\\nensure proper operation of your applications which\

+         \ send email.\",\n      \"koschei_monitor\": true,\n      \"monitor\": true,\n\

+         \      \"name\": \"python-lazr-smtptest\",\n      \"namespace\": \"rpms\"\

+         ,\n      \"review_url\": \"https://bugzilla.redhat.com/1387250\",\n      \"\

+         status\": \"Approved\",\n      \"summary\": \"Test framework for SMTP-based\

+         \ applications\",\n      \"upstream_url\": \"https://launchpad.net/lazr.smtptest\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"Mako is a template library written in Python. It\

+         \ provides a familiar, non-XML\\nsyntax which compiles into Python modules\

+         \ for maximum performance. Mako's\\nsyntax and API borrows from the best ideas\

+         \ of many others, including Django\\ntemplates, Cheetah, Myghty, and Genshi.\

+         \ Conceptually, Mako is an embedded\\nPython (i.e. Python Server Page) language,\

+         \ which refines the familiar ideas of\\ncomponentized layout and inheritance\

+         \ to produce one of the most straightforward\\nand flexible models available,\

+         \ while also maintaining close ties to Python\\ncalling and scoping semantics.\"\

+         ,\n      \"koschei_monitor\": false,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-mako\",\n      \"namespace\": \"rpms\",\n      \"review_url\":\

+         \ null,\n      \"status\": \"Approved\",\n      \"summary\": \"Mako template\

+         \ library for Python\",\n      \"upstream_url\": \"http://www.makotemplates.org/\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1474373839.0,\n\

+         \      \"description\": \"nose2 is the next generation of nicer testing for\

+         \ Python, based on the plugins\\nbranch of unittest2. nose2 aims to improve\

+         \ on nose by:\\n- providing a better plugin API\\n- being easier for users\

+         \ to configure\\n- simplifying internal interfaces and processes\\n- supporting\

+         \ Python 2 and 3 from the same codebase, without translation\\n- encouraging\

+         \ greater community involvement in its development\\n\\nIn service of some\

+         \ those goals, some features of nose will not be supported in\\nnose2. See\

+         \ the documentation for a thorough rundown.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python-nose2\",\n   \

+         \   \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1375926\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Next generation of\

+         \ nicer testing for Python\",\n      \"upstream_url\": \"https://nose2.readthedocs.org\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"pylibmc is a client in Python for memcached. It\

+         \ is a wrapper\\naround TangentOrg\\u2018s libmemcached library. The interface\

+         \ is\\nintentionally made as close to python-memcached as possible,\\nso that\

+         \ applications can drop-in replace it.\",\n      \"koschei_monitor\": false,\n\

+         \      \"monitor\": false,\n      \"name\": \"python-pylibmc\",\n      \"\

+         namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\": \"\

+         Approved\",\n      \"summary\": \"Memcached client for Python\",\n      \"\

+         upstream_url\": \"http://sendapatch.se/projects/pylibmc/\"\n    },\n    {\n\

+         \      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n      \"description\"\

+         : \"Most existing Python modules for sending HTTP requests are extremely verbose\

+         \ and\\ncumbersome. Python\\u2019s built-in urllib2 module provides most of\

+         \ the HTTP\\ncapabilities you should need, but the API is thoroughly broken.\

+         \ This library is\\ndesigned to make HTTP requests easy for developers.\"\

+         ,\n      \"koschei_monitor\": false,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-requests\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : null,\n      \"status\": \"Approved\",\n      \"summary\": \"HTTP library,\

+         \ written in Python, for human beings\",\n      \"upstream_url\": \"https://pypi.io/project/requests\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"Tornado is an open source version of the scalable,\

+         \ non-blocking web\\nserver and tools.\\n\\nThe framework is distinct from\

+         \ most mainstream web server frameworks\\n(and certainly most Python frameworks)\

+         \ because it is non-blocking and\\nreasonably fast. Because it is non-blocking\

+         \ and uses epoll, it can\\nhandle thousands of simultaneous standing connections,\

+         \ which means it is\\nideal for real-time web services.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python-tornado\",\n \

+         \     \"namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\"\

+         : \"Approved\",\n      \"summary\": \"Scalable, non-blocking web server and\

+         \ tools\",\n      \"upstream_url\": \"http://www.tornadoweb.org\"\n    },\n\

+         \    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n    \

+         \  \"description\": \"Python HTTP module with connection pooling and file\

+         \ POST abilities.\",\n      \"koschei_monitor\": false,\n      \"monitor\"\

+         : true,\n      \"name\": \"python-urllib3\",\n      \"namespace\": \"rpms\"\

+         ,\n      \"review_url\": null,\n      \"status\": \"Approved\",\n      \"\

+         summary\": \"Python HTTP library with thread-safe connection pooling and file\

+         \ post\",\n      \"upstream_url\": \"https://github.com/shazow/urllib3\"\n\

+         \    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1400070978.0,\n\

+         \      \"description\": \"Werkzeug\\n========\\n\\nWerkzeug started as simple\

+         \ collection of various utilities for WSGI\\napplications and has become one\

+         \ of the most advanced WSGI utility\\nmodules.  It includes a powerful debugger,\

+         \ full featured request and\\nresponse objects, HTTP utilities to handle entity\

+         \ tags, cache control\\nheaders, HTTP dates, cookie handling, file uploads,\

+         \ a powerful URL\\nrouting system and a bunch of community contributed addon\

+         \ modules.\\n\\nWerkzeug is unicode aware and doesn't enforce a specific template\\\

+         nengine, database adapter or anything else.  It doesn't even enforce\\na specific\

+         \ way of handling requests and leaves all that up to the\\ndeveloper. It's\

+         \ most useful for end user applications which should work\\non as many server\

+         \ environments as possible (such as blogs, wikis,\\nbulletin boards, etc.).\"\

+         ,\n      \"koschei_monitor\": true,\n      \"monitor\": true,\n      \"name\"\

+         : \"python-werkzeug\",\n      \"namespace\": \"rpms\",\n      \"review_url\"\

+         : null,\n      \"status\": \"Approved\",\n      \"summary\": \"The Swiss Army\

+         \ knife of Python web development\",\n      \"upstream_url\": \"http://werkzeug.pocoo.org/\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1465852253.0,\n\

+         \      \"description\": \"This package provides a flexible test runner with\

+         \ layer support.\",\n      \"koschei_monitor\": true,\n      \"monitor\":\

+         \ true,\n      \"name\": \"python-zope-testrunner\",\n      \"namespace\"\

+         : \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1341815\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Zope testrunner script\"\

+         ,\n      \"upstream_url\": \"https://pypi.python.org/pypi/zope.testrunner\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1481116746.0,\n\

+         \      \"description\": \"The zope.event package provides a simple event system.\

+         \ It provides\\nan event publishing system and a very simple event-dispatching\

+         \ system\\non which more sophisticated event dispatching systems can be built.\\\

+         n(For example, a type-based event dispatching system that builds on\\nzope.event\

+         \ can be found in zope.component.)\",\n      \"koschei_monitor\": true,\n\

+         \      \"monitor\": true,\n      \"name\": \"python3-zope-event\",\n     \

+         \ \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1395244\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Zope Event Publication\"\

+         ,\n      \"upstream_url\": \"http://pypi.python.org/pypi/zope.event/\"\n \

+         \   },\n    {\n      \"acls\": [],\n      \"creation_date\": 1481289213.0,\n\

+         \      \"description\": \"Interfaces are a mechanism for labeling objects\

+         \ as conforming to a given API\\nor contract.\\nThis is a separate distribution\

+         \ of the zope.interface package used in Zope 3.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python3-zope-interface\"\

+         ,\n      \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1395255\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Zope 3 Interface\

+         \ Infrastructure\",\n      \"upstream_url\": \"https://pypi.io/project/zope.interface\"\

+         \n    },\n    {\n      \"acls\": [],\n      \"creation_date\": 1479138409.0,\n\

+         \      \"description\": \"SassC is a wrapper around libsass used to generate\

+         \ a useful command-line\\napplication that can be installed and packaged for\

+         \ several operating systems.\",\n      \"koschei_monitor\": true,\n      \"\

+         monitor\": true,\n      \"name\": \"sassc\",\n      \"namespace\": \"rpms\"\

+         ,\n      \"review_url\": \"https://bugzilla.redhat.com/1369535\",\n      \"\

+         status\": \"Approved\",\n      \"summary\": \"Wrapper around libsass to compile\

+         \ CSS stylesheet\",\n      \"upstream_url\": \"http://github.com/sass/sassc\"\

+         \n    }\n  ],\n  \"watch\": [\n    {\n      \"acls\": [],\n      \"creation_date\"\

+         : 1400070978.0,\n      \"description\": \"Advanced Web Statistics is a powerful\

+         \ and featureful tool that generates\\nadvanced web server graphic statistics.\

+         \ This server log analyzer works\\nfrom command line or as a CGI and shows\

+         \ you all information your log contains,\\nin graphical web pages. It can\

+         \ analyze a lot of web/wap/proxy servers like\\nApache, IIS, Weblogic, Webstar,\

+         \ Squid, ... but also mail or ftp servers.\\n\\nThis program can measure visits,\

+         \ unique vistors, authenticated users, pages,\\ndomains/countries, OS busiest\

+         \ times, robot visits, type of files, search\\nengines/keywords used, visits\

+         \ duration, HTTP errors and more...\\nStatistics can be updated from a browser\

+         \ or your scheduler.\\nThe program also supports virtual servers, plugins\

+         \ and a lot of features.\\n\\nWith the default configuration, the statistics\

+         \ are available:\\nhttp://localhost/awstats/awstats.pl\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"awstats\",\n      \"\

+         namespace\": \"rpms\",\n      \"review_url\": null,\n      \"status\": \"\

+         Approved\",\n      \"summary\": \"Advanced Web Statistics\",\n      \"upstream_url\"\

+         : \"http://awstats.sourceforge.net\"\n    },\n    {\n      \"acls\": [],\n\

+         \      \"creation_date\": 1452111332.0,\n      \"description\": \"This API\

+         \ is mainly for Terminal Emulator implementors, or those writing programs\\\

+         nthat expect to interpreted by a terminal emulator and wish to determine the\\\

+         nprintable width of a string on a Terminal.\",\n      \"koschei_monitor\"\

+         : true,\n      \"monitor\": true,\n      \"name\": \"python-wcwidth\",\n \

+         \     \"namespace\": \"rpms\",\n      \"review_url\": \"https://bugzilla.redhat.com/1295152\"\

+         ,\n      \"status\": \"Approved\",\n      \"summary\": \"Measures number of\

+         \ Terminal column cells of wide-character codes\",\n      \"upstream_url\"\

+         : \"https://github.com/jquast/wcwidth\"\n    }\n  ]\n}"}

+     headers:

+       Accept-Ranges: [bytes]

+       Age: ['0']

+       AppServer: [proxy06.fedoraproject.org]

+       AppTime: [D=293465]

+       Connection: [Keep-Alive]

+       Content-Length: ['30176']

+       Content-Type: [application/json]

+       Date: ['Thu, 15 Jun 2017 15:04:24 GMT']

+       Keep-Alive: ['timeout=15, max=500']

+       Server: [Apache/2.4.6 (Red Hat Enterprise Linux) mod_wsgi/3.4 Python/2.7.5]

+       Set-Cookie: ['pkgdb=eyJfcGVybWFuZW50Ijp0cnVlfQ.DCQzeA.1B01lt3fuPret9GeJCOsCK6JptU;

+           Expires=Thu, 15-Jun-2017 16:04:24 GMT; Secure; HttpOnly; Path=/pkgdb/']

+       Strict-Transport-Security: [max-age=15768000; includeSubDomains; preload]

+       Via: [1.1 varnish-v4]

+       X-Varnish: ['23219393']

+     status: {code: 200, message: OK}

+ version: 1

@@ -0,0 +1,30 @@ 

+ interactions:

+ - request:

+     body: null

+     headers:

+       Accept: ['*/*']

+       Accept-Encoding: ['gzip, deflate']

+       Connection: [keep-alive]

+       User-Agent: [python-requests/2.13.0]

+     method: GET

+     uri: https://admin.fedoraproject.org/pkgdb/api/packager/package/abompard

+   response:

+     body: {string: "this is invalid"}

+     headers:

+       Accept-Ranges: [bytes]

+       Age: ['0']

+       AppServer: [proxy06.fedoraproject.org]

+       AppTime: [D=293465]

+       Connection: [Keep-Alive]

+       Content-Length: ['30176']

+       Content-Type: [application/json]

+       Date: ['Thu, 15 Jun 2017 15:04:24 GMT']

+       Keep-Alive: ['timeout=15, max=500']

+       Server: [Apache/2.4.6 (Red Hat Enterprise Linux) mod_wsgi/3.4 Python/2.7.5]

+       Set-Cookie: ['pkgdb=eyJfcGVybWFuZW50Ijp0cnVlfQ.DCQzeA.1B01lt3fuPret9GeJCOsCK6JptU;

+           Expires=Thu, 15-Jun-2017 16:04:24 GMT; Secure; HttpOnly; Path=/pkgdb/']

+       Strict-Transport-Security: [max-age=15768000; includeSubDomains; preload]

+       Via: [1.1 varnish-v4]

+       X-Varnish: ['23219393']

+     status: {code: 200, message: OK}

+ version: 1

file modified
+39 -21
@@ -3,11 +3,34 @@ 

  import json

  import unittest

  

+ from mock import Mock, patch

+ 

  import hubs.tests

  import hubs.models

  from hubs.app import app

  

  

+ class TestStreamExisting(hubs.tests.APPTest):

+     user = hubs.tests.FakeAuthorization('ralph')

+ 

+     @patch("hubs.feed.Notifications")

+     def test_get(self, Notifications):

+         feed = Mock()

+         Notifications.return_value = feed

+         feed.get.return_value = [{

+             "markup": "foo", "link": "bar",

+         }]

+         with hubs.tests.auth_set(app, self.user):

+             resp = self.app.get('/stream/existing')

+ 

+         self.assertEqual(resp.status_code, 200)

+         data = json.loads(resp.get_data(as_text=True))

+         self.assertEqual(data["status"], "OK")

+         self.assertEqual(len(data["data"]), 1)

+         self.assertEqual(data["data"][0]['markup'], 'foo')

+         self.assertEqual(data["data"][0]['link'], 'bar')

+ 

+ 

  class TestGetNotifications(hubs.tests.APPTest):

      user = hubs.tests.FakeAuthorization('ralph')

  
@@ -15,23 +38,25 @@ 

          name = 'notarealfasuser'

  

          with hubs.tests.auth_set(app, self.user):

-             resp = self.app.get('/{}/notifications/'.format(name))

+             resp = self.app.get('/stream/saved/'.format(name))

          self.assertEqual(resp.status_code, 200)

          data = json.loads(resp.get_data(as_text=True))

-         self.assertEqual(data, {"notifications": []})

+         self.assertEqual(data["status"], "OK")

+         self.assertEqual(len(data["data"]), 1)

+         self.assertEqual(data["data"][0]['markup'], 'foo')

+         self.assertEqual(data["data"][0]['link'], 'bar')

  

      def test_get_notifications_valid_name(self):

          with hubs.tests.auth_set(app, self.user):

-             resp = self.app.get('/{}/notifications/'.format(

+             resp = self.app.get('/stream/saved/'.format(

                  self.user.username))

  

          self.assertEqual(resp.status_code, 200)

          data = json.loads(resp.get_data(as_text=True))

-         self.assertEqual(len(data["notifications"]), 1)

- 

-         for saved in data["notifications"]:

-             self.assertEqual(saved['markup'], 'foo')

-             self.assertEqual(saved['link'], 'bar')

+         self.assertEqual(data["status"], "OK")

+         self.assertEqual(len(data["data"]), 1)

+         self.assertEqual(data["data"][0]['markup'], 'foo')

+         self.assertEqual(data["data"][0]['link'], 'bar')

  

  

  class TestPostNotifications(hubs.tests.APPTest):
@@ -48,18 +73,10 @@ 

          'username': user.username,

      }

  

-     def test_post_notification_invalid_user(self):

-         with hubs.tests.auth_set(app, self.user):

-             resp = self.app.post(

-                 '/{}/notifications/'.format('notarealfasuser'),

-                 data=json.dumps(self.valid_payload),

-                 content_type='application/json')

-         self.assertEqual(resp.status_code, 400)

- 

      def test_post_notification_invalid_payload(self):

          with hubs.tests.auth_set(app, self.user):

              resp = self.app.post(

-                 '/{}/notifications/'.format(self.user.username),

+                 '/stream/saved/',

                  data=json.dumps(self.invalid_payload),

                  content_type='application/json')

          self.assertEqual(resp.status_code, 400)
@@ -67,15 +84,16 @@ 

      def test_post_notification_valid_payload(self):

          with hubs.tests.auth_set(app, self.user):

              resp = self.app.post(

-                 '/{}/notifications/'.format(self.user.username),

+                 '/stream/saved/',

                  data=json.dumps(self.valid_payload),

                  content_type='application/json')

  

          self.assertEqual(resp.status_code, 200)

          data = json.loads(resp.get_data(as_text=True))

          self.assertTrue(isinstance(data, dict))

+         self.assertEqual(data["status"], "OK")

  

-         notification = data['notification']

+         notification = data['data']

          self.assertEqual(notification['markup'], 'foobar')

          self.assertEqual(notification['link'], 'baz')

  
@@ -107,7 +125,7 @@ 

          self.assertIsNotNone(self.notification)

          with hubs.tests.auth_set(app, self.user):

              resp = self.app.delete(

-                 '/{}/notifications/{}/'.format(self.user.username, idx)

+                 '/stream/saved/{}/'.format(idx)

              )

  

          self.assertEqual(resp.status_code, 200)
@@ -121,7 +139,7 @@ 

  

          with hubs.tests.auth_set(app, self.user):

              resp = self.app.delete(

-                 '/{}/notifications/{}/'.format(self.user.username, idx)

+                 '/stream/saved/{}/'.format(self.user.username, idx)

              )

          self.assertEqual(resp.status_code, 404)

  

file added
+30
@@ -0,0 +1,30 @@ 

+ """

+ Attributes:

+     cache (dogpile.cache.region.CacheRegion): The cache where data

+         will be stored. It is configured with the ``fedora-hubs.cache`` key in

+         fedmsg configuration.

+ """

+ 

+ from __future__ import unicode_literals

+ 

+ import dogpile

+ import dogpile.cache

+ 

+ from hubs.utils import get_fedmsg_config

+ 

+ 

+ def _get_cache():

+     cache_defaults = {

+         "backend": "dogpile.cache.dbm",

+         "expiration_time": 1,  # Expire every 1 second, for development

+         "arguments": {

+             "filename": "/var/tmp/fedora-hubs-cache.db",

+         },

+     }

+     cache = dogpile.cache.make_region()

+     fedmsg_config = get_fedmsg_config()

+     cache.configure(**fedmsg_config.get('fedora-hubs.cache', cache_defaults))

+     return cache

+ 

+ 

+ cache = _get_cache()

file added
+45
@@ -0,0 +1,45 @@ 

+ from __future__ import unicode_literals

+ 

+ import logging

+ 

+ import requests

+ 

+ from .cache import cache

+ 

+ log = logging.getLogger(__name__)

+ 

+ PKGDB_URL = "https://admin.fedoraproject.org/pkgdb/api/packager/package"

+ 

+ 

+ def get_owned_packages(username, use_cache=True):

+     """Get the list of packages owned by the username.

+ 

+     Args:

+         username (str): The FAS username.

+         use_cache (bool): Whether to cache the results for a few minutes.

+             Defaults to ``True``.

+     """

+     def get_data():

+         url = "/".join([PKGDB_URL, username])

+         try:

+             response = requests.get(url, timeout=5)

+         except (requests.exceptions.ConnectionError,

+                 requests.exceptions.Timeout):

+             log.warning("Could not get the list of owned packages for %s",

+                         username)

+             return []

+         try:

+             data = response.json()

+         except ValueError:

+             log.warning(

+                 "Invalid JSON response getting the list of packages "

+                 "owned by %s: %s", username, response.text)

+             return []

+         owned = data['point of contact'] + data['co-maintained']

+         return sorted(set(p['name'] for p in owned))

+     if not use_cache:

+         return get_data()

+     else:

+         cache_key = ":".join(["owned_packages", username])

+         # Cache for 5 minutes, it seems reasonable.

+         return cache.get_or_create(cache_key.encode("utf-8"), get_data, 5 * 60)

file modified
+27 -2
@@ -53,7 +53,7 @@ 

      from hubs.widgets import registry

      widgets = {"left": [], "right": []}

      try:

-         user = flask.g.auth.user

+         user = flask.g.user

      except AttributeError:

          user = None

      for widget in hub.widgets:
@@ -166,7 +166,7 @@ 

              hub_name = kwargs[url_param]

              hub = get_hub(hub_name, load_config=True)

              try:

-                 user = flask.g.auth.user

+                 user = flask.g.user

              except AttributeError:

                  user = None

              if not hub.allows(user, action):
@@ -286,3 +286,28 @@ 

          if errors:

              raise ValueError(errors)

          return values

+ 

+ 

+ def get_sse_url(target):

+     """Build the SSE URL."""

+     # Avoid circular import with widgets.

+     from hubs.app import app

+     # TODO: have a page (hub, user stream, ...) specific SSE URL to only

+     # subscribe to relevant messages.

+     base_url = flask.url_for("index", _external=True)

+     base_url = urlparse.urlsplit(base_url)

+     conf = app.config["SSE_URL"]

+     host = conf.get("host") or base_url.hostname

+     port = conf.get("port") or base_url.port

+     netloc = host

+     if port:

+         netloc = "{}:{}".format(host, port)

+     path = conf.get("path", "")

+     if not path.endswith("/"):

+         path += "/"

+     path = urlparse.urljoin(base_url.path, path)

+     path = urlparse.urljoin(path, target)

+     return urlparse.urlunsplit([

+         conf.get("scheme") or base_url.scheme,

+         netloc, path, "", "",

+     ])

file modified
+9 -4
@@ -6,8 +6,8 @@ 

  

  from hubs.app import app

  from hubs.utils.views import (

-     get_hub, get_visible_widgets, login_required, RequestValidator,

-     require_hub_access,

+     get_hub, get_sse_url, get_visible_widgets, login_required,

+     RequestValidator, require_hub_access,

      )

  

  
@@ -18,7 +18,12 @@ 

      hub = get_hub(name, load_config=True)

      widgets = get_visible_widgets(hub)

      return flask.render_template(

-         'hubs.html', hub=hub, widgets=widgets, edit=False)

+         'hubs.html',

+         hub=hub,

+         widgets=widgets,

+         edit=False,

+         sse_url=get_sse_url("hub/{}".format(hub.name)),

+         )

  

  

  @app.route('/<name>/json/')
@@ -188,7 +193,7 @@ 

              "role": assoc.role,

          }

          if (assoc.role == "owner" and

-                 assoc.user.username == flask.g.auth.user.username):

+                 assoc.user.username == flask.g.user.username):

              user["locked"] = True

          result["result"]["users"][assoc.role].append(user)

      return result

file modified
+38 -39
@@ -1,52 +1,51 @@ 

  from __future__ import unicode_literals, absolute_import

  

  import flask

- import json

  import hubs.models

- import hubs.stream

+ import hubs.feed

  

  from hubs.app import app

- from hubs.utils.views import (

-     login_required, get_hub, get_visible_widgets, require_hub_access,

-     )

+ from hubs.utils.views import login_required, get_hub, get_sse_url

  

  

- @app.route('/<name>/stream')

- @app.route('/<name>/stream/')

+ @app.route('/stream')

+ @app.route('/stream/')

  @login_required

- @require_hub_access("view")

- def stream(name):

-     hub = get_hub(name)

-     widgets = get_visible_widgets(hub)

-     saved = hubs.models.SavedNotification.by_username(name)

-     saved = [n.__json__() for n in saved]

- 

-     stream = hubs.stream.Stream()

-     actions = stream.get_json()

- 

+ def stream():

+     username = flask.g.user.username

+     hub = get_hub(username)

      return flask.render_template(

          'stream.html',

          hub=hub,

-         widgets=widgets,

-         saved=json.dumps(saved),

-         actions=actions

+         sse_url=get_sse_url("user/{}".format(username)),

      )

  

  

- @app.route('/<user>/notifications', methods=['GET', 'POST'])

- @app.route('/<user>/notifications/', methods=['GET', 'POST'])

+ @app.route('/stream/existing')

  @login_required

- def notifications(user):

-     if flask.request.method == 'GET':

-         notifications = hubs.models.SavedNotification.by_username(user)

-         notifications = [n.__json__() for n in notifications]

-         return flask.jsonify(dict(notifications=notifications))

- 

-     if flask.request.method == 'POST':

+ def stream_existing():

+     username = flask.g.user.username

+     feed = hubs.feed.Notifications(username)

+     existing = feed.get()  # TODO: paging?

+     # Right now, stream and actions are the same.

+     # Once mentions is implemented, then each will be its own.

+     return flask.jsonify(dict(

+         status="OK", data=existing,

+         ))

+ 

+ 

+ @app.route('/stream/saved', methods=['GET', 'POST'])

+ @app.route('/stream/saved/', methods=['GET', 'POST'])

+ @login_required

+ def saved_notifs():

+     user = flask.g.user

+     if flask.request.method == "GET":

+         saved = hubs.models.SavedNotification.by_username(user.username)

+         return flask.jsonify(dict(

+             status="OK", data=[n.__json__() for n in saved],

+             ))

+     elif flask.request.method == "POST":

          data = flask.request.get_json()

-         user = hubs.models.User.by_username(user)

-         if not user:

-             return flask.abort(400)

          try:

              markup = data['markup']

              link = data['link']
@@ -63,22 +62,22 @@ 

          )

          flask.g.db.add(notification)

          flask.g.db.commit()

-         return flask.jsonify(

-             dict(notification=notification.__json__(), success=True)

-         )

+         return flask.jsonify(dict(

+             status="OK", data=notification.__json__(),

+             ))

  

  

- @app.route('/<user>/notifications/<int:idx>', methods=['DELETE'])

- @app.route('/<user>/notifications/<int:idx>/', methods=['DELETE'])

+ @app.route('/stream/saved/<int:idx>', methods=['DELETE'])

+ @app.route('/stream/saved/<int:idx>/', methods=['DELETE'])

  @login_required

- def delete_notifications(user, idx):

+ def delete_notifs(idx):

      notification = flask.g.db.query(

          hubs.models.SavedNotification).filter_by(idx=idx).first()

      if not notification:

          return flask.abort(400)

      flask.g.db.delete(notification)

      flask.g.db.commit()

-     return flask.jsonify(dict(status_code=200))

+     return flask.jsonify(dict(status="OK"))

  

  

  @app.route('/visit/<visited_hub>/', methods=['GET', 'POST'])

@@ -1,13 +1,18 @@ 

  from __future__ import unicode_literals

  

- import requests

+ import logging

+ 

  import pkgwat.api

  

+ from hubs.utils.pkgdb import get_owned_packages

  from hubs.widgets import validators

  from hubs.widgets.base import Widget, WidgetView

  from hubs.widgets.caching import CachedFunction

  

  

+ log = logging.getLogger('hubs.widgets')

+ 

+ 

  PKGDB_URL = "https://admin.fedoraproject.org/pkgdb/api/packager/package"

  

  
@@ -17,11 +22,17 @@ 

      label = "Bugzilla issues"

      position = "right"

      parameters = [dict(

-         name="username",

-         label="Username",

-         default=None,

-         validator=validators.Username,

-         help="A FAS username.",

+             name="username",

+             label="Username",

+             default=None,

+             validator=validators.Username,

+             help="A FAS username.",

+         ), dict(

+             name="max",

+             label="Max number of issues",

+             default=3,

+             validator=validators.Integer,

+             help="The maximum number of issues to display.",

          )]

  

  
@@ -41,63 +52,38 @@ 

  

  

  class GetIssues(CachedFunction):

+     """Returns data for Bugzilla widget.

  

-     ''' Returns data for Bugzilla Widget. Queries pkgdb api for package

-     list of the user and bugzilla for correspoding issues '''

+     Queries pkgdb api for package list of the user and bugzilla for

+     corresponding issues.

+     """

  

      def execute(self):

          username = self.instance.config["username"]

-         url = "/".join([PKGDB_URL, username])

-         response = requests.get(url)

-         data = response.json()

- 

+         max_num = int(self.instance.config.get("max", 3))

+         owned = get_owned_packages(username)

          issues = []

- 

-         # get the packages of which the user is

-         # point of contact

-         for package in data["point of contact"]:

-             if len(issues) == 3:

-                 break

-             pkg_details = pkgwat.api.bugs(package['name'])

-             for row in pkg_details['rows']:

-                 if len(issues) == 3:

-                     break

-                 issues.append(

-                     dict(

-                         id=row['id'],

-                         title=row['description'],

-                         pkg_name=package['name'],

-                     )

-                 )

- 

-         # get the packages of which the user is

-         # co maintainer

-         for package in data["co-maintained"]:

-             if len(issues) == 3:

+         for pkg_name in owned:

+             if len(issues) == max_num:

                  break

-             pkg_details = pkgwat.api.bugs(package['name'])

+             pkg_details = pkgwat.api.bugs(pkg_name)

              for row in pkg_details['rows']:

-                 if len(issues) == 3:

+                 if len(issues) == max_num:

                      break

                  issues.append(

                      dict(

                          id=row['id'],

                          title=row['description'],

-                         pkg_name=package['name'],

+                         pkg_name=pkg_name,

                      )

                  )

- 

          return issues

  

      def should_invalidate(self, message):

+         try:

+             component = message['msg']['bug']['component']

+         except KeyError:

+             return False

          username = self.instance.config["username"]

-         url = "/".join([PKGDB_URL, username])

-         response = requests.get(url)

-         data = response.json()

- 

-         owned = data['point of contact'] + data['co-maintained']

-         owned = [p['name'] for p in owned]

-         if message['msg']['bug']['component'] in owned:

-             return True

- 

-         return False

+         owned = get_owned_packages(username)

+         return (component in owned)

file modified
+3 -26
@@ -1,37 +1,14 @@ 

- """

- Attributes:

-     cache (dogpile.cache.region.CacheRegion): The cache where function results

-         will be stored. It is configured with the ``fedora-hubs.cache`` key in

-         fedmsg configuration.

- """

  from __future__ import unicode_literals

  

  import datetime

- import dogpile

- import dogpile.cache

  import logging

  

- from hubs.utils import get_fedmsg_config

- 

- 

- log = logging.getLogger(__name__)

- 

+ import dogpile.cache

  

- def _get_cache():

-     cache_defaults = {

-         "backend": "dogpile.cache.dbm",

-         "expiration_time": 1,  # Expire every 1 second, for development

-         "arguments": {

-             "filename": "/var/tmp/fedora-hubs-cache.db",

-         },

-     }

-     cache = dogpile.cache.make_region()

-     fedmsg_config = get_fedmsg_config()

-     cache.configure(**fedmsg_config.get('fedora-hubs.cache', cache_defaults))

-     return cache

+ from hubs.utils.cache import cache

  

  

- cache = _get_cache()

+ log = logging.getLogger(__name__)

  

  

  class CachedFunction(object):

@@ -55,8 +55,14 @@ 

          username = self.instance.config["username"]

          url = "https://apps.fedoraproject.org/datagrepper/raw?user={username}"

          url = url.format(username=username)

-         response = requests.get(url)

-         fedmsgs = response.json()['total']

+         try:

+             response = requests.get(url, timeout=5)

+             fedmsgs = response.json()['total']

+         except (requests.exceptions.Timeout, ValueError):

+             fedmsgs = None

+             fedmsgs_text = "?"

+         else:

+             fedmsgs_text = commas(fedmsgs)

          sub_list = []

          for assoc in self.instance.hub.associations:

              if assoc.user:
@@ -64,7 +70,7 @@ 

          subscribers = [u.username for u in self.instance.hub.subscribers]

          return dict(

              fedmsgs=fedmsgs,

-             fedmsgs_text=commas(fedmsgs),

+             fedmsgs_text=fedmsgs_text,

              subscribers=subscribers,

              subscribed_to=sub_list,

              subscribers_text=commas(len(subscribers)),
@@ -73,5 +79,4 @@ 

  

      def should_invalidate(self, message):

          usernames = fedmsg.meta.msg2usernames(message, **fedmsg_config)

-         username = self.instance.config['username']

-         return username in usernames

+         return (self.instance.config['username'] in usernames)

file modified
+24 -16
@@ -1,11 +1,15 @@ 

- from __future__ import unicode_literals

+ from __future__ import unicode_literals, absolute_import

  

  

+ import logging

+ 

  from hubs.widgets import validators

  from hubs.widgets.base import Widget, WidgetView

  

- import logging

- log = logging.getLogger('hubs')

+ from .functions import GetData

+ 

+ 

+ log = logging.getLogger('hubs.widgets')

  

  

  class Feed(Widget):
@@ -15,18 +19,13 @@ 

      position = "left"

      parameters = [

          {

-             "name": "username",

-             "label": "Username",

-             "default": None,

-             "validator": validators.Username,

-             "help": "A FAS username.",

-         }, {

              "name": "message_limit",

              "label": "Message limit",

              "default": 20,

              "validator": validators.Integer,

              "help": "Max number of feed messages to display.",

          }]

+     cached_functions_module = ".functions"

  

  

  class BaseView(WidgetView):
@@ -36,13 +35,22 @@ 

      template_name = "feed.html"

  

      def get_context(self, instance, *args, **kwargs):

-         # Avoid circular import

-         from hubs.app import app

-         username = instance.config["username"]

-         feed_url = app.config['SSE_URL'] + username

          return dict(

              title=self.widget.label,

-             matches=[],

-             message_limit=instance.config["message_limit"],

-             feed_url=feed_url,

+             disable_autoreload=True,

              )

+ 

+ 

+ class ExistingView(WidgetView):

+ 

+     name = "existing"

+     url_rules = ["/existing"]

+     json = True

+ 

+     def get_context(self, instance, *args, **kwargs):

+         get_data = GetData(instance)

+         existing = get_data()

+         return {

+             "status": "OK",

+             "data": existing,

+         }

@@ -0,0 +1,26 @@ 

+ from __future__ import unicode_literals, absolute_import

+ 

+ 

+ import fedmsg.meta

+ 

+ from hubs.feed import Activity, format_msg

+ from hubs.widgets.caching import CachedFunction

+ 

+ 

+ class GetData(CachedFunction):

+     """Get the feed data from Redis and aggregate it."""

+ 

+     def execute(self):

+         hub_name = self.instance.hub.name

+         feed = Activity(hub_name)

+         raw_msgs = feed.get()  # TODO: paging?

+         msgs = fedmsg.meta.conglomerate(raw_msgs)

+         msgs = [format_msg(msg) for msg in msgs]

+         limit = self.instance.config["message_limit"]

+         return msgs[:limit]

+ 

+     def should_invalidate(self, message):

+         if "_hubs" not in message:

+             return False

+         hub_name = self.instance.hub.name

+         return (hub_name in message["_hubs"])

@@ -1,21 +1,18 @@ 

  {% extends "panel.html" %}

  

  {% block content %}

- <div id="feed">

+ <div id="feed-{{ widget_instance.idx }}">

  </div>

  

  <script type="text/javascript"

-   src ="{{url_for('static', filename='js/build/Feed.js')}}"></script>

+   src ="{{ url_for('static', filename='js/build/Feed.js') }}"></script>

  <script>

  (function() {

-     const FeedElement = React.createElement(Feed, {

-         matches: {{ matches }},

-         url: '{{ feed_url }}',

-         options: {

-             messageLimit: {{ message_limit }}

-         }

+     const FeedElement = React.createElement(Feed.Widget, {

+         url: {{ url_for("feed_existing", hub=widget_instance.hub.name, idx=widget_instance.idx) | tojson }},

+         widgetIdx: {{ widget_instance.idx | tojson }}

      });

-     ReactDOM.render(FeedElement, document.getElementById('feed'));

+     ReactDOM.render(FeedElement, document.getElementById('feed-' + {{ widget_instance.idx | tojson }}));

  })();

  </script>

  {% endblock %}

@@ -30,6 +30,7 @@ 

          return dict(

              hubs=instance.config.get("hubs", []),

              title=self.widget.label,

+             disable_autoreload=True,

              )

  

  

@@ -45,15 +45,23 @@ 

  

      def get_context(self, instance, *args, **kwargs):

          get_meetings = GetMeetings(instance)

+         now = datetime.datetime.utcnow()

+         meetings = {

+             title: meeting

+             for title, meeting in get_meetings().items()

+             if meeting['start_dt'] > now

+             }

          return dict(

              title=self.widget.label,

              calendar=instance.config["calendar"],

-             meetings=get_meetings(),

+             meetings=meetings,

              )

  

  

  class GetMeetings(CachedFunction):

  

+     TOPIC = ".fedocal.calendar."

+ 

      def execute(self):

          calendar = self.instance.config["calendar"]

          n_meetings = self.instance.config.get("n_meetings", 4)
@@ -81,18 +89,13 @@ 

          return meetings

  

      def should_invalidate(self, message):

-         # TODO -- first, if this is a fedocal widget, we need to just

-         # invalidate ourselves right away.

- 

-         # second, check our old cache value and see if any of our meetings have

-         # passed by in time.

-         old_meetings = self.execute()

-         now = datetime.datetime.utcnow()

-         for title, meeting in old_meetings.items():

-             assert type(meeting['start_dt']) == type(now)

-             if meeting['start_dt'] < now:

-                 return True

-         return False

+         if self.TOPIC not in message["topic"]:

+             return False

+         try:

+             calendar = message["msg"]["calendar"]["calendar_name"]

+         except KeyError:

+             return False

+         return (calendar == self.instance.config.get("calendar"))

  

  

  def next_meeting(meetings):

@@ -46,7 +46,10 @@ 

          repo = self.instance.config["repo"]

          url = '/'.join([pagure_url, repo, "pull-requests"])

          response = requests.get(url)

-         data = response.json()

+         try:

+             data = response.json()

+         except ValueError:

+             return dict(all_pr=[], total_req=0)

          total_req = data['total_requests']

          all_pr = list()

  
@@ -77,5 +80,8 @@ 

          if category != "pagure":

              # TODO -- this could be honed in more to just PRs

              return False

-         return (message['msg']['project']['name'] ==

-                 self.instance.config['repo'])

+         try:

+             project = message['msg']['project']['name']

+         except KeyError:

+             return False

+         return (project == self.instance.config['repo'])

@@ -1,11 +1,13 @@ 

- <div class="card {{ panel_css_class }} widget-{{ widget.name }}">

+ <div class="card {{ panel_css_class }} widget-{{ widget.name }}"

+      {% if disable_autoreload %}data-disable-autoreload="true"{% endif %}

+     >

    <div class="widget-buttons">

      <!-- the AGPLv3 wrapper puts the source url in all responses -->

      <a href="{{ url_for('widget_source', name=widget.name) }}"><span><i class="fa fa-eye" aria-hidden="true"></i></span></a>

      <a href="{{ url_for('%s_root' % widget.name, hub=widget_instance.hub.name, idx=widget_instance.idx) }}">

          <span><i class="fa fa-external-link" aria-hidden="true"></i></span>

      </a>

-     {% if widget_instance.hub.allows(g.auth.user, "config") %}

+     {% if widget_instance.hub.allows(g.user, "config") %}

      <a data-target="#edit_modal" data-toggle="modal" type="button"

          class="edit_widget" data-url="{{ widget_instance.edit_url }}">

        <span><i class="fa fa-cog" aria-hidden="true"></i></span>

file modified
+1 -1
@@ -94,7 +94,7 @@ 

      @classmethod

      def to_string(cls, value):

          if value is None and flask.g.auth.logged_in:

-             return flask.g.auth.user.username

+             return flask.g.user.username

          return value

  

  

file modified
+1 -1
@@ -105,7 +105,7 @@ 

          """

          instance = self._get_instance(*args, **kwargs)

          try:

-             user = flask.g.auth.user

+             user = flask.g.user

          except AttributeError:

              user = None

          if not instance.allows(user, self.permission):

@@ -63,7 +63,7 @@ 

              return False

          # Search the message to see if I am in the ACLs list of the request.

          username = self.instance.config["username"]

-         for acl in message['msg']['package_listing']['acls']:

+         for acl in message['msg']['package_listing'].get('acls', []):

              if acl['fas_name'] == username and acl['status'] == 'Approved':

                  return True

          return False

file modified
+2
@@ -24,4 +24,6 @@ 

  six

  pygments

  pygments-markdown-lexer

+ redis

  retask

+ txredisapi

file modified
+1 -1
@@ -15,7 +15,7 @@ 

  import hubs.app

  import hubs.models

  import hubs.widgets.base

- import hubs.widgets.caching

+ 

  

  # get the DB session

  session = hubs.app.session

@@ -0,0 +1,19 @@ 

+ [Unit]

+ Description=fedora-hubs SSE server

+ After=network.target

+ Documentation=https://pagure.io/fedora-hubs/

+ 

+ [Service]

+ ExecStart= \

+     twistd -l - --pidfile= \

+     -ny /srv/hubs/fedora-hubs/hubs/backend/sse_server.tac

+ WorkingDirectory=/srv/hubs/fedora-hubs/

+ PIDFile=/run/hubs-sse.pid

+ Type=simple

+ User=root

+ Group=root

+ Restart=on-failure

+ 

+ [Install]

+ WantedBy=multi-user.target

+ 

The feed & stream page backends were pretty much phony; this pull request reworks the UI (React) to load more dynamically, and adds a backend that gets messages from the bus, processes them, stores them in the Redis DB, and notifies the UI.

As always with big pull requests, reading commit-by-commit should be much easier than reading the whole diff at once.

rebased

6 years ago

6 new commits added

  • Compatibility with Python < 3.5
  • Update documentation
  • Add unit tests
  • Add a parameter to control the number of issues to display
  • Cache the list of owned packages
  • Fix some errors in the ansible role
6 years ago

4 new commits added

  • Ansible role: better defaults
  • One more small fix to the ansible role
  • Fix typo
  • Ansible role: add a parameter for the SSL cert
6 years ago

2 new commits added

  • Protect the pagure_pr widget against malformed JSON
  • Ansible role: OIDC needs larger Nginx buffers
6 years ago

Pull-Request has been merged by abompard

6 years ago
Metadata
Changes Summary 84
+1 -0
file changed
Vagrantfile.example
+3 -1
file changed
ansible/roles/hubs/defaults/main.yml
+9
file added
ansible/roles/hubs/files/nginx_proxy_params
+4 -0
file changed
ansible/roles/hubs/handlers/main.yml
+2 -0
file changed
ansible/roles/hubs/tasks/db-postgresql.yml
+3 -1
file changed
ansible/roles/hubs/tasks/db-sqlite.yml
+8 -1
file changed
ansible/roles/hubs/tasks/main.yml
+20 -2
file changed
ansible/roles/hubs/tasks/webserver.yml
+18
file added
ansible/roles/hubs/templates/hubs-sse.service
+9 -0
file changed
ansible/roles/hubs/templates/hubs_config
+19 -8
file changed
ansible/roles/hubs/templates/nginx.conf
+2 -2
file changed
ansible/roles/hubs/templates/nginx_ssl_params
+1 -0
file changed
ansible/vagrant-playbook.yml
+0 -1
file changed
check-cache-coverage.py
+1 -1
file changed
docs/api/index.rst
+21
file added
docs/api/utils.rst
-9
file removed
docs/api/views.rst
+24 -16
file changed
docs/dev-guide.rst
+1 -1
file changed
docs/diagram.txt
+24 -18
file changed
hubs/app.py
+215
file added
hubs/backend/sse_server.py
+43
file added
hubs/backend/sse_server.tac
+49 -13
file changed
hubs/backend/triage.py
+39 -7
file changed
hubs/backend/worker.py
+5 -1
file changed
hubs/default_config.py
+1 -0
file changed
hubs/default_fedmsg_config.py
+210
file added
hubs/feed.py
-34
file removed
hubs/static/client/app/__tests__/Feed.test.js
-71
file removed
hubs/static/client/app/components/Dropdown.jsx
-28
file removed
hubs/static/client/app/components/Markup.jsx
-19
file removed
hubs/static/client/app/components/Panel.jsx
+39
file added
hubs/static/client/app/components/feed/Actions.jsx
+52
file added
hubs/static/client/app/components/feed/Feed.jsx
+13 -13
file renamed
hubs/static/client/app/components/Icon.jsx
hubs/static/client/app/components/feed/Icon.jsx
+133
file added
hubs/static/client/app/components/feed/ItemsGetter.jsx
+53
file added
hubs/static/client/app/components/feed/Markup.jsx
+52
file added
hubs/static/client/app/components/feed/Panel.jsx
+34
file added
hubs/static/client/app/components/feed/__tests__/Feed.test.js
+10 -8
file renamed
hubs/static/client/app/__tests__/Icon.test.js
hubs/static/client/app/components/feed/__tests__/Icon.test.js
+9 -6
file renamed
hubs/static/client/app/__tests__/Markup.test.js
hubs/static/client/app/components/feed/__tests__/Markup.test.js
+25 -6
file renamed
hubs/static/client/app/__tests__/Panel.test.js
hubs/static/client/app/components/feed/__tests__/Panel.test.js
+2 -1
file changed
hubs/static/client/app/core/Hubs.js
+167
file added
hubs/static/client/app/core/Streams.jsx
+7
file added
hubs/static/client/app/widgets/feed/Feed.js
-53
file removed
hubs/static/client/app/widgets/feed/Feed.jsx
+45
file added
hubs/static/client/app/widgets/feed/Widget.jsx
+1 -1
file changed
hubs/static/client/webpack.config.js
+11 -0
file changed
hubs/static/css/style.css
+38 -0
file changed
hubs/static/js/utils.js
-11
file removed
hubs/stream.py
+3 -12
file changed
hubs/templates/hubs.html
+5 -4
file changed
hubs/templates/master.html
+10 -88
file changed
hubs/templates/stream.html
+20 -1
file changed
hubs/tests/__init__.py
+7 -1
file changed
hubs/tests/hubs_test.cfg
+145
file added
hubs/tests/test_feed.py
+2 -1
file changed
hubs/tests/test_widget_caching.py
+0
file added
hubs/tests/utils/__init__.py
+50
file added
hubs/tests/utils/test_pkgdb.py
+47 -2
file renamed
hubs/tests/test_view_utils.py
hubs/tests/utils/test_views.py
+451
file added
hubs/tests/vcr-request-data/hubs.tests.utils.test_pkgdb.PkgdbUtilsTest.test_get_owned_packages
+30
file added
hubs/tests/vcr-request-data/hubs.tests.utils.test_pkgdb.PkgdbUtilsTest.test_get_owned_packages_invalid
+39 -21
file changed
hubs/tests/views/test_user.py
+30
file added
hubs/utils/cache.py
+45
file added
hubs/utils/pkgdb.py
+27 -2
file changed
hubs/utils/views.py
+9 -4
file changed
hubs/views/hub.py
+38 -39
file changed
hubs/views/user.py
+34 -48
file changed
hubs/widgets/bugzilla/__init__.py
+3 -26
file changed
hubs/widgets/caching.py
+10 -5
file changed
hubs/widgets/fedmsgstats/__init__.py
+24 -16
file changed
hubs/widgets/feed/__init__.py
+26
file added
hubs/widgets/feed/functions.py
+6 -9
file changed
hubs/widgets/feed/templates/feed.html
+1 -0
file changed
hubs/widgets/halp/views.py
+16 -13
file changed
hubs/widgets/meetings/__init__.py
+9 -3
file changed
hubs/widgets/pagure_pr/__init__.py
+4 -2
file changed
hubs/widgets/templates/panel.html
+1 -1
file changed
hubs/widgets/validators.py
+1 -1
file changed
hubs/widgets/view.py
+1 -1
file changed
hubs/widgets/workflow/pendingacls.py
+2 -0
file changed
requirements.txt
+1 -1
file changed
smart_cache_invalidator.py
+19
file added
systemd/hubs-sse.service