diff --git a/.github/workflows/ci-docker.yaml b/.github/workflows/ci-docker.yaml index 624d69d..972bacf 100644 --- a/.github/workflows/ci-docker.yaml +++ b/.github/workflows/ci-docker.yaml @@ -17,6 +17,17 @@ jobs: username: ${{ github.repository_owner }} password: ${{ secrets.GITHUB_TOKEN }} + - name: Build Test Docker Image + uses: docker/build-push-action@v2 + with: + context: "." + push: false + tags: ghcr.io/${{ github.repository }}:${{ github.ref_name }}-test + target: test + + - name: Execute Tests + run: docker run ghcr.io/${{ github.repository }}:${{ github.ref_name }}-test + - name: Build and Push Docker Image uses: docker/build-push-action@v2 with: diff --git a/Dockerfile b/Dockerfile index 13c656e..6005872 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.6.14 +FROM python:3.12.3 as build MAINTAINER RAMS Project "code@magfest.org" LABEL version.sideboard ="1.0" WORKDIR /app @@ -69,11 +69,23 @@ RUN curl -SLO "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux- # required for python-prctl RUN apt-get update && apt-get install -y libcap-dev && rm -rf /var/lib/apt/lists/* -ADD . /app/ RUN pip3 install virtualenv \ && virtualenv --always-copy /app/env \ - && /app/env/bin/pip3 install paver "setuptools<58" + && /app/env/bin/pip3 install paver + +ADD requirements.txt requirements.txt +ADD test_requirements.txt test_requirements.txt +ADD setup.py setup.py +ADD sideboard/_version.py sideboard/_version.py +ADD pavement.py pavement.py + RUN /app/env/bin/paver install_deps +ADD . /app/ + +FROM build as test +RUN /app/env/bin/pip install mock pytest +CMD /app/env/bin/python3 -m pytest +FROM build as release CMD /app/env/bin/python3 /app/sideboard/run_server.py -EXPOSE 8282 +EXPOSE 8282 \ No newline at end of file diff --git a/pavement.py b/pavement.py index b3006b6..404242b 100644 --- a/pavement.py +++ b/pavement.py @@ -2,7 +2,6 @@ import os import sys import glob -import pkg_resources from itertools import chain from os.path import abspath, dirname, exists, join @@ -110,28 +109,6 @@ def pull_plugins(): sh('cd "{}";git pull'.format(plugin_dir)) -@task -def assert_all_files_import_unicode_literals(): - """ - error if a python file is found in sideboard or plugins that does not import unicode_literals; \ -this is skipped for Python 3 - """ - if sys.version_info[0] == 2: - all_files_found = [] - cmd = ("find '%s' -name '*.py' ! 
-size 0 " - "-exec grep -RL 'from __future__ import.*unicode_literals.*$' {} \;") - for test_dir in chain(['sideboard'], collect_plugin_dirs(module=True)): - output = sh(cmd % test_dir, capture=True) - if output: - all_files_found.append(output) - - if all_files_found: - print('the following files did not include "from __future__ import unicode_literals":') - print(''.join(all_files_found)) - raise BuildFailure("there were files that didn't include " - '"from __future__ import unicode_literals"') - - @task def assert_all_projects_correctly_define_a_version(): """ @@ -180,12 +157,6 @@ def run_all_assertions(): def create_plugin(options): """create a plugin skeleton to start a new project""" - # this is actually needed thanks to the skeleton using jinja2 (and six, although that's changeable) - try: - pkg_resources.get_distribution("sideboard") - except pkg_resources.DistributionNotFound: - raise BuildFailure("This command must be run from within a configured virtual environment.") - plugin_name = options.create_plugin.name if getattr(options.create_plugin, 'drop', False) and (PLUGINS_DIR / path(plugin_name.replace('_', '-'))).exists(): diff --git a/requirements.txt b/requirements.txt index a8c4727..b701108 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,15 @@ -cherrypy==17.3.0 -configobj>=5.0.5 -Jinja2>=2.7 -logging_unterpolation>=0.2.0 -paver>=1.2.2 -pip>=1.5.6 -psutil>=5.4.1 -python-prctl>=1.6.1; 'linux' in sys_platform -redis==4.3.6 -requests>=2.2.1 -rpctools>=0.3.1 -sh>=1.09 -six>=1.5.2 -SQLAlchemy>=1.1.0 -wheel>=0.24.0 -ws4py>=0.3.2 +cherrypy==18.9.0 +configobj==5.0.8 +Jinja2==3.1.3 +paver==1.3.4 +pip==24.0 +psutil==5.9.8 +python-prctl==1.8.1; 'linux' in sys_platform +redis==5.0.3 +requests==2.31.0 +rpctools==0.3.1 +sh==2.0.6 +six==1.16.0 +SQLAlchemy==1.4.52 +wheel==0.43.0 +ws4py==0.5.1 diff --git a/sideboard/config.py b/sideboard/config.py index c78f985..d6a27b2 100755 --- a/sideboard/config.py +++ b/sideboard/config.py @@ -3,7 +3,7 @@ import re from os import unlink -from collections import Sized, Iterable, Mapping +from collections.abc import Sized, Iterable, Mapping from copy import deepcopy from tempfile import NamedTemporaryFile diff --git a/sideboard/configspec.ini b/sideboard/configspec.ini index 927176c..87b9a6d 100644 --- a/sideboard/configspec.ini +++ b/sideboard/configspec.ini @@ -33,10 +33,7 @@ ws.call_timeout = integer(default=10) # seconds ws.poll_interval = integer(default=300) # seconds ws.reconnect_interval = integer(default=60) # seconds -# Sideboard exposes two websocket endpoints. The first is at /wsrpc and doesn't -# require authentication, with the expectation being that the frontend webserver -# which reverse proxies to Sideboard will either block or require a client cert -# for this endpoint. The second is at /ws and by default requires a logged-in +# Sideboard exposes a websocket at /ws and by default requires a logged-in # user to work. This setting can turn off that authentication check, which is # useful for development or for applications which require no authentication. 
ws.auth_required = boolean(default=True) diff --git a/sideboard/internal/logging.py b/sideboard/internal/logging.py index b8c4277..20e71b8 100644 --- a/sideboard/internal/logging.py +++ b/sideboard/internal/logging.py @@ -2,8 +2,6 @@ import os import logging.config -import logging_unterpolation - from sideboard.config import config, get_config_root @@ -20,7 +18,6 @@ def format(self, record): def _configure_logging(): - logging_unterpolation.patch_logging() fname = os.path.join(get_config_root(), 'logging.cfg') if os.path.exists(fname): logging.config.fileConfig(fname, disable_existing_loggers=True) diff --git a/sideboard/jsonrpc.py b/sideboard/jsonrpc.py index 895d88c..70b6e4d 100755 --- a/sideboard/jsonrpc.py +++ b/sideboard/jsonrpc.py @@ -3,7 +3,6 @@ import traceback import cherrypy -from cherrypy.lib.jsontools import json_decode from sideboard.lib import log, config, serializer from sideboard.websockets import trigger_delayed_notifications @@ -31,7 +30,7 @@ def force_json_in(): if cherrypy.request.method in ('POST', 'PUT'): body = request.body.fp.read() try: - cherrypy.serving.request.json = json_decode(body.decode('utf-8')) + cherrypy.serving.request.json = json.loads(body.decode('utf-8')) except ValueError: raise cherrypy.HTTPError(400, 'Invalid JSON document') @@ -49,14 +48,14 @@ def jsonrpc_handler(self=None): def error(code, message): body = {'jsonrpc': '2.0', 'id': id, 'error': {'code': code, 'message': message}} - log.warn('returning error message: {!r}', body) + log.warning('returning error message: %s', body) return body body = cherrypy.request.json if not isinstance(body, dict): return error(ERR_INVALID_JSON, 'invalid json input {!r}'.format(cherrypy.request.body)) - log.debug('jsonrpc request body: {!r}', body) + log.debug('jsonrpc request body: %s', body) id, params = body.get('id'), body.get('params', []) if 'method' not in body: @@ -83,7 +82,7 @@ def error(code, message): try: response = {'jsonrpc': '2.0', 'id': id, 'result': getattr(service, function)(*args, **kwargs)} - log.debug('returning success message: {!r}', response) + log.debug('returning success message: %s', response) return response except Exception as e: errback(e, 'unexpected jsonrpc error calling ' + method) diff --git a/sideboard/lib/_cp.py b/sideboard/lib/_cp.py index 4c61e23..d409533 100644 --- a/sideboard/lib/_cp.py +++ b/sideboard/lib/_cp.py @@ -88,7 +88,7 @@ def _run_shutdown(): try: func() except Exception: - log.warn('Ignored exception during shutdown', exc_info=True) + log.warning('Ignored exception during shutdown', exc_info=True) stopped = Event() on_startup(stopped.clear, priority=0) diff --git a/sideboard/lib/_redissession.py b/sideboard/lib/_redissession.py index 07bb307..548778d 100644 --- a/sideboard/lib/_redissession.py +++ b/sideboard/lib/_redissession.py @@ -1,88 +1,88 @@ -import threading - -try: - import cPickle as pickle -except ImportError: - import pickle - -from cherrypy.lib.sessions import Session -import redis -from redis import Sentinel - -class RedisSession(Session): - - # the default settings - host = '127.0.0.1' - port = 6379 - db = 0 - password = None - tls_skip_verify = False - is_sentinel = False - ssl = False - user = "" - - - @classmethod - def setup(cls, **kwargs): - """Set up the storage system for redis-based sessions. - Called once when the built-in tool calls sessions.init. 
- """ - # overwritting default settings with the config dictionary values - for k, v in kwargs.items(): - setattr(cls, k, v) - - if cls.tls_skip_verify: - cls.ssl_cert_req=None - else: - cls.ssl_cert_req="required" - - if cls.is_sentinel: - sentinel = Sentinel([(cls.host, cls.port)], ssl=cls.ssl, ssl_cert_reqs=cls.ssl_cert_req, sentinel_kwargs={"password":cls.sentinel_pass, "ssl": cls.ssl, "ssl_cert_reqs": cls.ssl_cert_req}, username=cls.user, password=cls.password) - cls.cache = sentinel.master_for(cls.sentinel_service) - - else: - cls.cache = redis.Redis( - host=cls.host, - port=cls.port, - db=cls.db, - ssl=cls.ssl, - username=cls.user, - password=cls.password) - - def _exists(self): - return bool(self.cache.exists(self.prefix+self.id)) - - def _load(self): - try: - return pickle.loads(self.cache.get(self.prefix+self.id)) - except TypeError: - # if id not defined pickle can't load None and raise TypeError - return None - - def _save(self, expiration_time): - pickled_data = pickle.dumps( - (self._data, expiration_time), - pickle.HIGHEST_PROTOCOL) - - result = self.cache.setex(self.prefix+self.id, self.timeout * 60, pickled_data) - - if not result: - raise AssertionError("Session data for id %r not set." % self.prefix+self.id) - - def _delete(self): - self.cache.delete(self.prefix+self.id) - - # http://docs.cherrypy.org/dev/refman/lib/sessions.html?highlight=session#locking-sessions - # session id locks as done in RamSession - - locks = {} - - def acquire_lock(self): - """Acquire an exclusive lock on the currently-loaded session data.""" - self.locked = True - self.locks.setdefault(self.prefix+self.id, threading.RLock()).acquire() - - def release_lock(self): - """Release the lock on the currently-loaded session data.""" - self.locks[self.prefix+self.id].release() +import threading + +try: + import cPickle as pickle +except ImportError: + import pickle + +from cherrypy.lib.sessions import Session +import redis +from redis import Sentinel + +class RedisSession(Session): + + # the default settings + host = '127.0.0.1' + port = 6379 + db = 0 + password = None + tls_skip_verify = False + is_sentinel = False + ssl = False + user = "" + + + @classmethod + def setup(cls, **kwargs): + """Set up the storage system for redis-based sessions. + Called once when the built-in tool calls sessions.init. + """ + # overwritting default settings with the config dictionary values + for k, v in kwargs.items(): + setattr(cls, k, v) + + if cls.tls_skip_verify: + cls.ssl_cert_req=None + else: + cls.ssl_cert_req="required" + + if cls.is_sentinel: + sentinel = Sentinel([(cls.host, cls.port)], ssl=cls.ssl, ssl_cert_reqs=cls.ssl_cert_req, sentinel_kwargs={"password":cls.sentinel_pass, "ssl": cls.ssl, "ssl_cert_reqs": cls.ssl_cert_req}, username=cls.user, password=cls.password) + cls.cache = sentinel.master_for(cls.sentinel_service) + + else: + cls.cache = redis.Redis( + host=cls.host, + port=cls.port, + db=cls.db, + ssl=cls.ssl, + username=cls.user, + password=cls.password) + + def _exists(self): + return bool(self.cache.exists(self.prefix+self.id)) + + def _load(self): + try: + return pickle.loads(self.cache.get(self.prefix+self.id)) + except TypeError: + # if id not defined pickle can't load None and raise TypeError + return None + + def _save(self, expiration_time): + pickled_data = pickle.dumps( + (self._data, expiration_time), + pickle.HIGHEST_PROTOCOL) + + result = self.cache.setex(self.prefix+self.id, self.timeout * 60, pickled_data) + + if not result: + raise AssertionError("Session data for id %r not set." 
% self.prefix+self.id) + + def _delete(self): + self.cache.delete(self.prefix+self.id) + + # http://docs.cherrypy.org/dev/refman/lib/sessions.html?highlight=session#locking-sessions + # session id locks as done in RamSession + + locks = {} + + def acquire_lock(self): + """Acquire an exclusive lock on the currently-loaded session data.""" + self.locked = True + self.locks.setdefault(self.prefix+self.id, threading.RLock()).acquire() + + def release_lock(self): + """Release the lock on the currently-loaded session data.""" + self.locks[self.prefix+self.id].release() self.locked = False \ No newline at end of file diff --git a/sideboard/lib/_services.py b/sideboard/lib/_services.py index 5bfe9c4..38b3080 100644 --- a/sideboard/lib/_services.py +++ b/sideboard/lib/_services.py @@ -176,7 +176,7 @@ def _ws_url(host, rpc_opts): Given a hostname and set of config options returned by _rpc_opts, return the standard URL websocket endpoint for a Sideboard remote service. """ - return '{protocol}://{host}/wsrpc'.format(host=host, protocol='wss' if rpc_opts['ca'] else 'ws') + return '{protocol}://{host}/ws'.format(host=host, protocol='wss' if rpc_opts['ca'] else 'ws') def _register_rpc_services(rpc_services): @@ -201,8 +201,6 @@ def _register_rpc_services(rpc_services): jservice = getattr(jproxy, service_name) if rpc_services.get(host, {}).get('jsonrpc_only'): service = jservice - else: - service = services._register_websocket(_ws_url(host, rpc_opts), ssl_opts=ssl_opts, connect_immediately=False) services.register(service, service_name, _jsonrpc=jservice, _override=True) @@ -218,6 +216,6 @@ class _SideboardCoreServices(object): """ def poll(self): """empty method which exists only to help keep WebSockets alive""" - log.debug('sideboard.poll by user {}', threadlocal.get('username')) + log.debug('sideboard.poll by user %s', threadlocal.get('username')) services.register(_SideboardCoreServices(), 'sideboard') diff --git a/sideboard/lib/_threads.py b/sideboard/lib/_threads.py index 559df7b..4d79c66 100644 --- a/sideboard/lib/_threads.py +++ b/sideboard/lib/_threads.py @@ -121,7 +121,7 @@ def stop(self): else: break else: - log.warning('not all daemons have been joined: {}', self.threads) + log.warning('not all daemons have been joined: %s', self.threads) del self.threads[:] diff --git a/sideboard/lib/_utils.py b/sideboard/lib/_utils.py index 93b3706..3f774bb 100644 --- a/sideboard/lib/_utils.py +++ b/sideboard/lib/_utils.py @@ -5,7 +5,8 @@ from datetime import datetime, date from contextlib import contextmanager from threading import RLock, Condition, current_thread -from collections import Sized, Iterable, Mapping, defaultdict +from collections.abc import Sized, Iterable, Mapping +from collections import defaultdict def is_listy(x): diff --git a/sideboard/lib/_websockets.py b/sideboard/lib/_websockets.py index e47b10e..4868565 100644 --- a/sideboard/lib/_websockets.py +++ b/sideboard/lib/_websockets.py @@ -6,7 +6,7 @@ from itertools import count from threading import RLock, Event from datetime import datetime, timedelta -from collections import Mapping, MutableMapping +from collections.abc import Mapping, MutableMapping import six from ws4py.client.threadedclient import WebSocketClient @@ -41,7 +41,7 @@ def close(self, code=1000, reason=''): self.connected = False def send(self, data): - log.debug('sending {!r}', data) + log.debug('sending %s', data) assert self.connected, 'tried to send data on closed websocket {!r}'.format(self.url) if isinstance(data, Mapping): data = json.dumps(data) @@ -49,7 +49,7 @@ 
def send(self, data): def received_message(self, message): message = message.data if isinstance(message.data, six.text_type) else message.data.decode('utf-8') - log.debug('received {!r}', message) + log.debug('received %s', message) try: message = json.loads(message) except: @@ -97,7 +97,7 @@ class WebSocket(object): def __init__(self, url=None, ssl_opts=None, connect_immediately=True, max_wait=2): self.ws = None - self.url = url or 'ws://127.0.0.1:{}/wsrpc'.format(config['cherrypy']['server.socket_port']) + self.url = url or 'ws://127.0.0.1:{}/ws'.format(config['cherrypy']['server.socket_port']) self._lock = RLock() self._callbacks = {} self._counter = count() @@ -148,7 +148,7 @@ def _poll(self): try: self.call(self.poll_method) except: - log.warning('no poll response received from {!r}, closing connection, will attempt to reconnect', self.url, exc_info=True) + log.warning('no poll response received from %s, closing connection, will attempt to reconnect', self.url, exc_info=True) self.ws.close() else: self._last_poll = datetime.now() @@ -169,7 +169,7 @@ def _reconnect(self): self.ws = self.WebSocketDispatcher(self._dispatcher, self.url, ssl_opts=self.ssl_opts) self.ws.connect() except Exception as e: - log.warn('failed to connect to {}: {}', self.url, str(e)) + log.warning('failed to connect to %s: %s', self.url, str(e)) self._last_reconnect_attempt = datetime.now() self._reconnect_attempts += 1 else: @@ -180,18 +180,18 @@ def _next_id(self, prefix): return '{}-{}'.format(prefix, next(self._counter)) def _send(self, **kwargs): - log.debug('sending {}', kwargs) + log.debug('sending %s', kwargs) with self._lock: assert self.connected, 'tried to send data on closed websocket {!r}'.format(self.url) try: return self.ws.send(kwargs) except: - log.warn('failed to send {!r} on {!r}, closing websocket and will attempt to reconnect', kwargs, self.url) + log.warning('failed to send %s on %s, closing websocket and will attempt to reconnect', kwargs, self.url) self.ws.close() raise def _dispatch(self, message): - log.debug('dispatching {}', message) + log.debug('dispatching %s', message) try: assert isinstance(message, Mapping), 'incoming message is not a dictionary' assert 'client' in message or 'callback' in message, 'no callback or client in message {}'.format(message) @@ -217,7 +217,7 @@ def fallback(self, message): >>> ws.connect() """ _, exc, _ = sys.exc_info() - log.error('no callback registered for message {!r}, message ignored: {}', message, exc) + log.error('no callback registered for message %s, message ignored: %s', message, exc) @property def connected(self): @@ -242,7 +242,7 @@ def connect(self, max_wait=0): break else: if max_wait: - log.warn('websocket {!r} not connected after {} seconds', self.url, max_wait) + log.warning('websocket %s not connected after %s seconds', self.url, max_wait) def close(self): """ @@ -300,7 +300,7 @@ def subscribe(self, callback, method, *args, **kwargs): paramback = self._callbacks[client].get('paramback') params = self.preprocess(method, paramback() if paramback else (args or kwargs)) - self._callbacks[client].setdefault('errback', lambda result: log.error('{}(*{}, **{}) returned an error: {!r}', method, args, kwargs, result)) + self._callbacks[client].setdefault('errback', lambda result: log.error('%s(*%s, **%s) returned an error: %s', method, args, kwargs, result)) self._callbacks[client].update({ 'method': method, 'params': params @@ -309,7 +309,7 @@ def subscribe(self, callback, method, *args, **kwargs): try: self._send(method=method, params=params, 
client=client) except: - log.warn('initial subscription to {} at {!r} failed, will retry on reconnect', method, self.url) + log.warning('initial subscription to %s at %s failed, will retry on reconnect', method, self.url) return client @@ -635,4 +635,4 @@ def refresh(self): try: self._callback(self.ws.call(self.method, *self.args, **self.kwargs), ws) except: - log.warn('failed to fetch latest data from {} on {}', self.method, ws.url) + log.warning('failed to fetch latest data from %s on %s', self.method, ws.url) diff --git a/sideboard/lib/sa/__init__.py b/sideboard/lib/sa/__init__.py index 2e23908..dd8dce7 100644 --- a/sideboard/lib/sa/__init__.py +++ b/sideboard/lib/sa/__init__.py @@ -11,6 +11,7 @@ from sqlalchemy.ext import declarative from sqlalchemy.dialects import postgresql from sqlalchemy.orm import Query, sessionmaker, configure_mappers +from sqlalchemy.orm.decl_base import _declarative_constructor from sqlalchemy.types import TypeDecorator, String, DateTime, CHAR, Unicode from sideboard.lib import log, config @@ -52,6 +53,7 @@ class CoerceUTF8(TypeDecorator): before passing off to the database. """ impl = Unicode + cache_ok = True def process_bind_param(self, value, dialect): if isinstance(value, type(b'')): @@ -66,6 +68,7 @@ class UUID(TypeDecorator): CHAR(32), storing as stringified hex values. """ impl = CHAR + cache_ok = True def load_dialect_impl(self, dialect): if dialect.name == 'postgresql': @@ -93,6 +96,7 @@ def process_result_value(self, value, dialect): class JSON(TypeDecorator): impl = String + cache_ok = True def __init__(self, comparator=None): self.comparator = comparator @@ -131,7 +135,8 @@ def compare_values(self, x, y): else: class UTCDateTime(TypeDecorator): impl = DateTime - + cache_ok = True + def process_bind_param(self, value, engine): if value is not None: return value.astimezone(UTC).replace(tzinfo=None) @@ -174,22 +179,12 @@ def check_constraint_naming_convention(constraint, table): for operator, text in replacements: constraint_name = constraint_name.replace(operator, text) - constraint_name = re.sub('[\W\s]+', '_', constraint_name) + constraint_name = re.sub(r'[\W\s]+', '_', constraint_name) if len(constraint_name) > 32: constraint_name = uuid.uuid5(uuid.NAMESPACE_OID, str(constraint_name)).hex return constraint_name -# SQLAlchemy doesn't expose its default constructor as a nicely importable -# function, so we grab it from the function defaults. 
-if six.PY2: - _spec_args, _spec_varargs, _spec_kwargs, _spec_defaults = inspect.getargspec(declarative.declarative_base) -else: - _declarative_spec = inspect.getfullargspec(declarative.declarative_base) - _spec_args, _spec_defaults = _declarative_spec.args, _declarative_spec.defaults -declarative_base_constructor = dict(zip(reversed(_spec_args), reversed(_spec_defaults)))['constructor'] - - def declarative_base(*orig_args, **orig_kwargs): """ Replacement for SQLAlchemy's declarative_base, which adds these features: @@ -212,10 +207,10 @@ def __init__(self, *args, **kwargs): """ if '_model' in kwargs: assert kwargs.pop('_model') == self.__class__.__name__ - declarative_base_constructor(self, *args, **kwargs) + _declarative_constructor(self, *args, **kwargs) for attr, col in self.__table__.columns.items(): if kwargs.get(attr) is None and col.default: - self.__dict__.setdefault(attr, col.default.execute()) + self.__dict__.setdefault(attr, col.default.arg(col)) orig_kwargs['cls'] = Mixed if 'name' not in orig_kwargs: diff --git a/sideboard/lib/sa/_crud.py b/sideboard/lib/sa/_crud.py index b7bf6dc..de9ef47 100644 --- a/sideboard/lib/sa/_crud.py +++ b/sideboard/lib/sa/_crud.py @@ -157,7 +157,8 @@ import inspect import collections from copy import deepcopy -from collections import Mapping, defaultdict +from collections.abc import Mapping +from collections import defaultdict from datetime import datetime, date, time from itertools import chain from functools import wraps @@ -193,9 +194,9 @@ def listify_with_count(x, count=None): def mappify(value): if isinstance(value, six.string_types): return {value: True} - elif isinstance(value, collections.Mapping): + elif isinstance(value, collections.abc.Mapping): return value - elif isinstance(value, collections.Iterable): + elif isinstance(value, collections.abc.Iterable): return {v: True for v in value} else: raise TypeError('unknown datatype: {}', value) @@ -308,7 +309,7 @@ def normalize_object_graph(graph): return {graph: True} elif isinstance(graph, dict): return graph - elif isinstance(graph, collections.Iterable): + elif isinstance(graph, collections.abc.Iterable): return dict([(str(i), True) for i in graph]) else: return None @@ -505,9 +506,9 @@ def normalize_data(data, count=1): else: if isinstance(data, six.string_types): data = [{data: True}] - elif isinstance(data, collections.Mapping): + elif isinstance(data, collections.abc.Mapping): data = [data] - elif isinstance(data, collections.Iterable): + elif isinstance(data, collections.abc.Iterable): if any(isinstance(element, six.string_types) for element in data): # this is the singular list of strings case, so wrap it and # go from there @@ -629,7 +630,7 @@ def wrapped(*args, **kwargs): except: a = [x for x in (args or [])] kw = {k: v for k, v in (kwargs or {}).items()} - log.error('Error calling {}.{} {!r} {!r}'.format(fn.__module__, fn.__name__, a, kw), exc_info=True) + log.error('Error calling %s.%s %s %s'.format(fn.__module__, fn.__name__, a, kw), exc_info=True) exc_class, exc, tb = sys.exc_info() raise six.reraise(CrudException, CrudException(str(exc)), tb) return wrapped @@ -674,7 +675,7 @@ def _collect_models(cls, query): try: model = Session.resolve_model(d['_model']) except: - log.debug('unable to resolve model {} in query {}', d.get('_model'), d) + log.debug('unable to resolve model %s in query %s', d.get('_model'), d) else: models.add(model) for attr_name in collect_fields(d): @@ -763,7 +764,7 @@ def _resolve_comparison(cls, comparison, column, value): if isinstance(value, dict): 
model_class = Session.resolve_model(value.get('_model')) field = value.get('select', 'id') - value = select([getattr(model_class, field)], cls._resolve_filters(value)) + value = select(getattr(model_class, field)).where(cls._resolve_filters(value)) return { 'eq': lambda field, val: field == val, @@ -789,7 +790,7 @@ def _resolve_comparison(cls, comparison, column, value): @classmethod def _resolve_filters(cls, filters, model=None): model = Session.resolve_model(filters.get('_model', model)) - table = class_mapper(model).mapped_table + table = class_mapper(model).persist_selectable and_clauses = filters.get('and', None) or_clauses = filters.get('or', None) if and_clauses: @@ -1222,7 +1223,7 @@ def _create_or_fetch(cls, session, value, **backref_mapping): try: instance = session.query(cls).filter(cls.id == id).first() except: - log.error('Unable to fetch instance based on id value {!r}', value, exc_info=True) + log.error('Unable to fetch instance based on id value %s', value, exc_info=True) raise TypeError('Invalid instance ID type for relation: {0.__name__} (value: {1})'.format(cls, value)) elif isinstance(value, Mapping): # if there's no id, check to see if we're provided a dictionary @@ -1239,19 +1240,19 @@ def _create_or_fetch(cls, session, value, **backref_mapping): except NoResultFound: continue except MultipleResultsFound: - log.error('multiple results found for {} unique constraint: {}', cls.__name__, column_names) + log.error('multiple results found for %s unique constraint: %s', cls.__name__, column_names) raise else: break else: - log.debug('unable to search using unique constraints: {} with {}', column_names, value) + log.debug('unable to search using unique constraints: %s with %s', column_names, value) if instance and id is None and backref_mapping and getattr(instance, backref_name, None) != parent_id: - log.warning('attempting to change the owner of {} without an explicitly passed id; a new {} instance will be used instead', instance, cls.__name__) + log.warning('attempting to change the owner of %s without an explicitly passed id; a new %s instance will be used instead', instance, cls.__name__) instance = None if not instance: - log.debug('creating new: {} with id {}', cls.__name__, id) + log.debug('creating new: %s with id %s', cls.__name__, id) if id is None: instance = cls() else: @@ -1371,7 +1372,7 @@ def _get_one_to_many_foreign_key_attr_name_if_applicable(cls, name): if not getattr(remote_column, 'foreign_keys', set()): # tags don't actually have foreign keys set, but they need to be treated as the same if name == 'tags': - log.debug('special-case handling for tags, returning: {}', remote_column.name) + log.debug('special-case handling for tags, returning: %s', remote_column.name) return remote_column.name else: # the implication here could be that we're the many side of a @@ -1427,7 +1428,7 @@ def _merge_relations(self, name, value, validator=lambda self, name, val: True): if new_inst.id is None or new_inst not in relation: relation.append(new_inst) - elif isinstance(value, (collections.Mapping, six.string_types)): + elif isinstance(value, (collections.abc.Mapping, six.string_types)): if backref_id_name is not None and not value.get(backref_id_name): # if this is a dictionary, it's possible we're going to be # creating a new thing, if so, we'll add a backref to the @@ -1440,7 +1441,7 @@ def _merge_relations(self, name, value, validator=lambda self, name, val: True): if stale_inst is not None and property.cascade.delete_orphan: session.delete(stale_inst) - if 
isinstance(value, collections.Mapping): + if isinstance(value, collections.abc.Mapping): relation_inst.from_dict(value, validator) session.flush([relation_inst]) # we want this this to be queryable for other things @@ -1657,19 +1658,19 @@ def __init__(self, can_create=True, be read, and ONLY these names can be read. If not provided (default) all attributes not starting with an underscore (e.g. __str__, or _hidden) will be readable - @type read: C{collections.Iterable} + @type read: C{collections.abc.Iterable} @param no_read: if provided, interpreted as the attribute names that can't be read, taking precedence over anything specified in the read parameter. If not provided (default) everything allowed by the read parameter will be readable - @type no_read: C{collections.Iterable} + @type no_read: C{collections.abc.Iterable} @param update: if provided, interpreted as the attribute names that can be updated, in addition to the list of items are readable. If None (default) default to the list of readable attributes. Pass an empty iterable to use the default behavior listed under the read docstring if there were attributes passed to read that you don't want update to default to - @type update: C{collections.Iterable} + @type update: C{collections.abc.Iterable} @param no_update: if provided, interpreted as the attribute names that can't be updated, taking precedence over anything specified in the update parameter. If None (default) default to the list of @@ -1677,7 +1678,7 @@ def __init__(self, can_create=True, behavior listed under the no_read docstring if there were attributes passed to no_read that you don't want no_update to default to - @type no_update: C{collections.Iterable} + @type no_update: C{collections.abc.Iterable} @param can_delete: if True (default), the decorated class can be deleted @type can_delete: C{bool} @@ -1850,14 +1851,14 @@ def _get_crud_spec(cls): field['type'] = cls._type_map.get(type(attr.property.columns[0].type), 'auto') field_default = getattr(attr.property.columns[0], 'default', None) # only put the default here if it exists, and it's not an automatic thing like "time.utcnow()" - if field_default is not None and field['type'] != 'auto' and not isinstance(field_default.arg, (collections.Callable, property)): + if field_default is not None and field['type'] != 'auto' and not isinstance(field_default.arg, (collections.abc.Callable, property)): field['defaultValue'] = field_default.arg elif hasattr(attr, "default"): field['defaultValue'] = attr.default else: field['type'] = cls._type_map.get(type(attr), 'auto') # only set a default if this isn't a property or some other kind of "constructed attribute" - if field['type'] != 'auto' and not isinstance(attr, (collections.Callable, property)): + if field['type'] != 'auto' and not isinstance(attr, (collections.abc.Callable, property)): field['defaultValue'] = attr if isinstance(attr, InstrumentedAttribute) and isinstance(attr.property, RelationshipProperty): field['_model'] = attr.property.mapper.class_.__name__ diff --git a/sideboard/run_mainloop.py b/sideboard/run_mainloop.py index 664c859..2b3b59d 100644 --- a/sideboard/run_mainloop.py +++ b/sideboard/run_mainloop.py @@ -17,12 +17,12 @@ def mainloop_daemon(): if pid == 0: mainloop() else: - log.debug('writing pid ({}) to pidfile ({})', pid, args.pidfile) + log.debug('writing pid (%s) to pidfile (%s)', pid, args.pidfile) try: with open(args.pidfile, 'w') as f: f.write('{}'.format(pid)) except: - log.error('unexpected error writing pid ({}) to pidfile ({})', pid, 
args.pidfile, exc_info=True) + log.error('unexpected error writing pid (%s) to pidfile (%s)', pid, args.pidfile, exc_info=True) @entry_point diff --git a/sideboard/tests/__init__.py b/sideboard/tests/__init__.py index 9b48022..8ec5e3f 100644 --- a/sideboard/tests/__init__.py +++ b/sideboard/tests/__init__.py @@ -7,6 +7,7 @@ import sqlalchemy from sqlalchemy import event from sqlalchemy.orm import sessionmaker +from sqlalchemy.pool import NullPool from sideboard.lib import config, services @@ -71,7 +72,7 @@ def patch_session(Session, request): name = Session.__module__.split('.')[0] db_path = '/tmp/{}.db'.format(name) - Session.engine = sqlalchemy.create_engine('sqlite+pysqlite:///' + db_path) + Session.engine = sqlalchemy.create_engine('sqlite+pysqlite:///' + db_path, poolclass=NullPool) event.listen(Session.engine, 'connect', lambda conn, record: conn.execute('pragma foreign_keys=ON')) Session.session_factory = sessionmaker(bind=Session.engine, autoflush=False, autocommit=False, query_cls=Session.QuerySubclass) diff --git a/sideboard/tests/test_lib.py b/sideboard/tests/test_lib.py index a1b5a21..4523d55 100644 --- a/sideboard/tests/test_lib.py +++ b/sideboard/tests/test_lib.py @@ -4,7 +4,7 @@ from itertools import count from unittest import TestCase from datetime import datetime, date -from collections import Sequence, Set +from collections.abc import Sequence, Set from threading import current_thread, Thread import six @@ -406,14 +406,6 @@ class Foo(object): assert not is_listy(x) -def test_double_mount(request): - class Root(object): - pass - request.addfinalizer(lambda: cherrypy.tree.apps.pop('/test', None)) - cherrypy.tree.mount(Root(), '/test') - pytest.raises(Exception, cherrypy.tree.mount, Root(), '/test') - - def test_ajaz_serialization(): class Root(object): @ajax @@ -427,7 +419,7 @@ def test_trace_logging(): class TestLocallySubscribes(object): - @pytest.yield_fixture(autouse=True) + @pytest.fixture(autouse=True) def counter(self): _counter = count() diff --git a/sideboard/tests/test_sa.py b/sideboard/tests/test_sa.py index 8ec457f..981d34d 100644 --- a/sideboard/tests/test_sa.py +++ b/sideboard/tests/test_sa.py @@ -7,6 +7,7 @@ import sqlalchemy from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.pool import NullPool from sqlalchemy.orm import relationship from sqlalchemy.types import Boolean, Integer, UnicodeText from sqlalchemy.schema import Column, CheckConstraint, ForeignKey, MetaData, Table, UniqueConstraint @@ -41,7 +42,7 @@ class Boss(Base): @regex_validation('username', r'[0-9a-zA-z]+', 'Usernames may only contain alphanumeric characters') class Account(Base): user_id = Column(UUID(), ForeignKey('user.id', ondelete='RESTRICT'), nullable=False) - user = relationship(User) + user = relationship(User, overlaps="employees") username = Column(UnicodeText(), nullable=False, unique=True) password = Column(UnicodeText(), nullable=False) @@ -90,7 +91,7 @@ class CrudableMixin(object): } ) @text_length_validation('string_model_attr', 2, 100) -@regex_validation('string_model_attr', '^[A-Za-z0-9\.\_\-]+$', 'test thing') +@regex_validation('string_model_attr', r'^[A-Za-z0-9\.\_\-]+$', 'test thing') @text_length_validation('overridden_desc', 1, 100) @text_length_validation('nonexistant_field', 1, 100) class CrudableClass(CrudableMixin, Base): @@ -121,10 +122,10 @@ def string_and_int_hybrid_property(self): @string_and_int_hybrid_property.expression def string_and_int_hybrid_property(cls): - return case([ + return case( (cls.string_model_attr == None, ''), 
(cls.int_model_attr == None, '') - ], else_=(cls.string_model_attr + ' ' + cls.int_model_attr)) + , else_=(cls.string_model_attr + ' ' + cls.int_model_attr)) @property def unsettable_property(self): @@ -150,7 +151,7 @@ class BasicClassMixedIn(CrudableMixin, Base): class Session(SessionManager): - engine = sqlalchemy.create_engine('sqlite:////tmp/test_sa.db') + engine = sqlalchemy.create_engine('sqlite:////tmp/test_sa.db', poolclass=NullPool) class SessionMixin(object): def user(self, name): @@ -233,6 +234,7 @@ class Foo(WithOverriddenInit): assert Foo().id is None + @pytest.mark.filterwarnings("ignore:Unmanaged access of declarative attribute") def test_declarative_base_without_parameters(self): @declarative_base @@ -241,6 +243,7 @@ class BaseTest: assert BaseTest.__tablename__ == 'base_test' + @pytest.mark.filterwarnings("ignore:Unmanaged access of declarative attribute") def test_declarative_base_with_parameters(self): @declarative_base(name=str('NameOverride')) @@ -673,13 +676,12 @@ def test_update_nonupdatable_attribute(self, db): class TestCrudDelete(object): - def test_delete_cascades_to_tags(self): - pytest.skip('sqlite is not compiled with foreign key support on Jenkins; this test works on my machine but not on Jenkins') + def test_delete_cascades_to_tags(self, db): Session.crud.delete(query_from(db.turner_account)) Session.crud.delete(query_from(db.turner)) with Session() as session: - self.assertEqual(1, session.query(Account).count()) - self.assertEqual(2, session.query(Tag).count()) + assert 1 == session.query(Account).count() + assert 2 == session.query(Tag).count() def test_delete_by_id(self, db): Session.crud.delete({'_model': 'Account', 'field': 'id', 'value': db.turner_account['id']}) diff --git a/sideboard/tests/test_sep.py b/sideboard/tests/test_sep.py index aa557a0..7217846 100644 --- a/sideboard/tests/test_sep.py +++ b/sideboard/tests/test_sep.py @@ -15,7 +15,7 @@ class FakeExit(Exception): class TestSep(object): - @pytest.yield_fixture(autouse=True) + @pytest.fixture(autouse=True) def automocks(self, monkeypatch): monkeypatch.setattr(sep, 'exit', Mock(side_effect=FakeExit), raising=False) prev_argv, prev_points = sys.argv[:], _entry_points.copy() diff --git a/sideboard/tests/test_server.py b/sideboard/tests/test_server.py index 35576ad..946d598 100644 --- a/sideboard/tests/test_server.py +++ b/sideboard/tests/test_server.py @@ -33,7 +33,6 @@ cherrypy.config.update({'server.socket_port': available_port}) -@pytest.mark.functional class SideboardServerTest(TestCase): port = config['cherrypy']['server.socket_port'] jsonrpc_url = 'http://127.0.0.1:{}/jsonrpc'.format(port) @@ -86,25 +85,12 @@ def stop_cherrypy(cls): def setUpClass(cls): super(SideboardServerTest, cls).setUpClass() cls.start_cherrypy() - cls.ws = cls.patch_websocket(services.get_websocket()) - cls.ws.connect(max_wait=5) - assert cls.ws.connected @classmethod def tearDownClass(cls): cls.stop_cherrypy() super(SideboardServerTest, cls).tearDownClass() - @staticmethod - def patch_websocket(ws): - ws.q = Queue() - ws.fallback = ws.q.put - return ws - - def tearDown(self): - while not self.ws.q.empty(): - self.ws.q.get_nowait() - def wait_for(self, func, *args, **kwargs): for i in range(50): cherrypy.engine.publish('main') # since our unit tests don't call cherrypy.engine.block, we must publish this event manually @@ -154,426 +140,12 @@ def get(self, path, **params): def get_json(self, path, **params): return self._get(self.rsess, path, **params).json() - def open_ws(self): - return 
self.patch_websocket(WebSocket(connect_immediately=True, max_wait=5)) - - def next(self, ws=None, timeout=2): - return (ws or self.ws).q.get(timeout=timeout) - - def assert_incoming(self, ws=None, client=None, timeout=1, **params): - data = self.next(ws, timeout) - assert (client or self.client) == data.get('client') - for key, val in params.items(): - assert val == data[key] - def assert_no_response(self): pytest.raises(Empty, self.next) - def assert_error_with(self, *args, **kwargs): - if args: - self.ws.ws.send(str(args[0])) - else: - self.ws._send(**kwargs) - assert 'error' in self.next() - - def call(self, **params): - callback = 'callback{}'.format(randrange(1000000)) - self.ws._send(callback=callback, **params) - result = self.next() - assert callback == result['callback'] - return result - def subscribe(self, **params): params.setdefault('client', self.client) return self.call(**params) def unsubscribe(self, client=None): self.call(action='unsubscribe', client=client or self.client) - - -class JsonrpcTest(SideboardServerTest): - @pytest.fixture(autouse=True) - def override(self, service_patcher): - service_patcher('testservice', self) - - def get_message(self, name): - return 'Hello {}!'.format(name) - - def send_json(self, body, content_type='application/json'): - if isinstance(body, dict): - body['id'] = self._testMethodName - resp = requests.post(self.jsonrpc_url, data=json.dumps(body), - headers={'Content-Type': 'application/json'}) - assert resp.json - return resp.json() - - def test_rpctools(self): - assert 'Hello World!' == self.jsonrpc.testservice.get_message('World') - - def test_content_types(self): - for ct in ['text/html', 'text/plain', 'application/javascript', 'text/javascript', 'image/gif']: - response = self.send_json({ - 'method': 'testservice.get_message', - 'params': ['World'] - }, content_type=ct) - assert 'Hello World!' 
== response['result'], 'Expected success with valid reqeust using Content-Type {}'.format(ct) - - -class TestWebsocketSubscriptions(SideboardServerTest): - @pytest.fixture(autouse=True) - def override(self, service_patcher, config_patcher): - config_patcher(1, 'ws.call_timeout') - service_patcher('self', self) - - def echo(self, s): - self.echoes.append(s) - return s - - def slow_echo(self, s): - sleep(2) - return s - - @subscribes('names') - def get_names(self): - return self.names - - def change_name(self, name=None): - self.names[-1] = name or uuid4().hex - notify('names', delay=True) - - @notifies('names') - def change_name_then_error(self): - self.names[:] = reversed(self.names) - self.fail() - - def indirectly_change_name(self): - self.change_name(uuid4().hex) - - @subscribes('places') - def get_places(self): - return self.places - - @notifies('places') - def change_place(self): - self.places[0] = uuid4().hex - - @subscribes('names', 'places') - def get_names_and_places(self): - return self.names + self.places - - def setUp(self): - SideboardServerTest.setUp(self) - self.echoes = [] - self.places = ['Here'] - self.names = ['Hello', 'World'] - self.client = self._testMethodName - - def test_echo(self): - self.ws._send(method='self.echo', params='hello') - self.ws._send(method='self.echo', params=['hello']) - self.ws._send(method='self.echo', params={'s': 'hello'}) - self.assert_no_response() - self.ws._send(method='self.echo', params='hello', callback='cb123') - self.next() - assert ['hello'] * 4 == self.echoes - - def test_errors(self): - self.assert_error_with(0) - self.assert_error_with([]) - self.assert_error_with('') - self.assert_error_with('x') - self.assert_error_with(None) - - self.assert_error_with(method='missing') - self.assert_error_with(method='close_all') - self.assert_error_with(method='crud.missing') - self.assert_error_with(method='too.many.dots') - self.assert_error_with(method='self.echo.extra') - - self.assert_error_with(method='self.echo') - self.assert_error_with(method='self.echo', params=['too', 'many']) - self.assert_error_with(method='self.echo', params={'invalid': 'name'}) - self.assertEqual([], self.echoes) - - self.assert_error_with(method='self.fail') - - def test_callback(self): - result = self.call(method='self.echo', params='hello') - assert 'hello' == result['data'] - assert 'client' not in result - - result = self.call(method='crud.echo', params='hello', client='ds123') - assert 'ds123' == result['client'] - - def test_client_and_callback(self): - self.call(method='self.get_name', client=self.client) - self.assert_no_response() - - def test_triggered(self): - self.subscribe(method='self.get_names') - with self.open_ws() as other_ws: - other_ws._send(method='self.change_name', params=['Kitty']) - self.assert_incoming() - - def test_indirect_trigger(self): - self.subscribe(method='self.get_names') - with self.open_ws() as other_ws: - other_ws._send(method='self.indirectly_change_name') - self.assert_incoming() - - def test_unsubscribe(self): - self.test_triggered() - self.unsubscribe() - self.call(method='self.change_name', params=[uuid4().hex]) - self.assert_no_response() - - def test_errors_still_triggers(self): - with self.open_ws() as other_ws: - self.subscribe(method='self.get_names') - other_ws._send(method='self.change_name_then_error') - self.assert_incoming() - - def test_triggered_error(self): - with self.open_ws() as other_ws: - self.subscribe(method='self.get_names') - self.names.append(object()) - 
other_ws._send(method='self.change_name_then_error') - self.names[:] = ['Hello'] * 2 - other_ws._send(method='self.change_name') - self.assert_incoming() - - def test_multiple_subscriptions(self): - self.subscribe(method='self.get_names') - self.subscribe(method='self.get_places') - self.assert_no_response() - with self.open_ws() as other_ws: - other_ws._send(method='self.change_name') - self.assert_incoming() - other_ws._send(method='self.change_place') - self.assert_incoming() - other_ws._send(method='self.echo', params='Hello') - self.assert_no_response() - - def test_multiple_triggers(self): - self.subscribe(method='self.get_names_and_places') - self.assert_no_response() - with self.open_ws() as other_ws: - other_ws._send(method='self.change_name') - self.assert_incoming() - other_ws._send(method='self.change_place') - self.assert_incoming() - other_ws._send(method='self.echo', params='Hello') - self.assert_no_response() - - def test_multiple_clients(self): - self.subscribe(method='self.get_names', client='client1') - self.subscribe(method='self.get_names', client='client2') - self.assert_no_response() - with self.open_ws() as other_ws: - other_ws._send(method='self.change_name') - assert {'client1', 'client2'} == {self.next()['client'], self.next()['client']} - - def test_nonlocking_echo(self): - self.ws._send(method='self.slow_echo', params=['foo'], - client='client1', callback='cb11') - sleep(1) - self.ws._send(method='self.echo', params=['bar'], client='client2', - callback='cb22') - self.assert_incoming(data='bar', client='client2') - self.assert_incoming(data='foo', client='client1', timeout=2) - - def test_client_locking(self): - self.ws._send(method='self.slow_echo', params=['foo'], - client=self.client, callback='cb1') - sleep(1) - self.ws._send(method='self.echo', params=['bar'], - client=self.client, callback='cb2') - self.assert_incoming(data='foo', timeout=2) - self.assert_incoming(data='bar') - - def test_jsonrpc_notification(self): - self.subscribe(method='self.get_names') - self.jsonrpc.self.change_name() - self.assert_incoming() - - def test_jsonrpc_websocket_client(self): - self.addCleanup(setattr, self.jsonrpc, "_prepare_request", - self.jsonrpc._prepare_request) - self.jsonrpc._prepare_request = lambda data, headers: data.update( - {'websocket_client': self.client}) - self.jsonrpc.self.change_name() - self.assert_no_response() - - -class TestWebsocketCall(SideboardServerTest): - @pytest.fixture(autouse=True) - def override(self, service_patcher, config_patcher): - config_patcher(1, 'ws.call_timeout') - service_patcher('test', self) - - def fast(self): - return 'fast' - - def slow(self): - sleep(2) - return 'slow' - - def test_fast(self): - assert self.ws.call('test.fast') == 'fast' - - def test_slow(self): - pytest.raises(Exception, self.ws.call, 'test.slow') - - -class TestWebsocketsCrudSubscriptions(SideboardServerTest): - @pytest.fixture(autouse=True) - def override(self, service_patcher): - class MockCrud: - pass - mr = self.mr = MockCrud() - for name in ['create', 'update', 'delete']: - setattr(mr, name, Session.crud.crud_notifies(self.make_crud_method(name))) - for name in ['read', 'count']: - setattr(mr, name, Session.crud.crud_subscribes(self.make_crud_method(name))) - service_patcher('crud', mr) - - def setUp(self): - SideboardServerTest.setUp(self) - self.ws.close() - self.ws = self.open_ws() - self.client = self._testMethodName - - def make_crud_method(self, name): - def crud_method(*args, **kwargs): - log.debug('mocked crud.{}'.format(name)) - assert not 
getattr(self.mr, name + '_error', False) - return uuid4().hex - - crud_method.__name__ = name.encode('utf-8') if six.PY2 else name - return crud_method - - def models(self, *models): - return [{'_model': model} for model in models] - - def read(self, *models): - self.ws._send(method='crud.read', client=self.client, params=self.models(*models)) - self.assert_incoming(trigger='subscribe') - - def update(self, *models, **kwargs): - client = kwargs.get('client', 'unique_client_' + uuid4().hex) - self.ws._send(method='crud.update', client=client, params=self.models(*models)) - self.assert_incoming(client=client) - - def test_read(self): - self.read('User') - self.assert_no_response() - - def test_triggered_read(self): - self.read('User') - self.update('User') - self.assert_incoming(trigger='update') - - def test_unsubscribe(self): - self.test_triggered_read() - self.unsubscribe() - self.update('User') - self.assert_no_response() - - def test_triggered_error(self): - self.mr.update_error = True - with self.open_ws() as other_ws: - other_ws._send(method='crud.read', client='other_tte', params=self.models('User')) - self.assert_incoming(other_ws, client='other_tte') - self.update('User') - self.ws._send(method='crud.update', client=self.client, params=self.models('User')) - assert 'error' in self.next() - self.assert_incoming(other_ws, client='other_tte', trigger='update') - - def test_indirect_trigger(self): - def account(*attrs): - if len(attrs) == 1: - return {'_model': 'Account', 'field': attrs[0]} - else: - return {'_model': 'Account', - 'or': [{'field': attr} for attr in attrs]} - - def call(*attrs): - self.call(method='crud.read', client=self.client, params=account(*attrs)) - - def assert_update_triggers(model): - self.update(model) - self.assert_incoming() - - call('xxx') - assert_update_triggers('Account') - self.unsubscribe() - - call('user.xxx') - assert_update_triggers('User') - assert_update_triggers('Account') - self.unsubscribe() - - call('user.xxx', 'boss.xxx') - assert_update_triggers('Account') - assert_update_triggers('User') - assert_update_triggers('Account') - self.unsubscribe() - - call('user.tags.xxx') - assert_update_triggers('Account') - assert_update_triggers('User') - assert_update_triggers('Tag') - - self.update('Boss') - self.assert_no_response() - - def test_trigger_and_callback(self): - result = self.call(method='crud.read', params=self.models('User'), client='ds_ttac') - self.assert_no_response() - - def test_multiple_triggers(self): - self.read('User', 'Boss') - self.update('User') - self.assert_incoming() - self.update('Boss') - self.assert_incoming() - self.update('Account') - self.assert_no_response() - - def test_trigger_changed(self): - self.read('User') - self.read('Boss') - self.update('User') - self.assert_no_response() - self.update('Boss') - self.assert_incoming() - self.assert_no_response() - - def test_multiple_clients(self): - self.read('Boss') - self.ws._send(method='crud.read', client='other_tmc', params=self.models('Boss')) - self.assert_incoming(client='other_tmc') - self.update('User') - self.assert_no_response() - self.read('Boss') - self.ws._send(method='crud.update', client='unused_client', params=self.models('Boss')) - self.next() - assert {self.client, 'other_tmc'} == {self.next()['client'], self.next()['client']} - - def test_broadcast_error(self): - with self.open_ws() as other_ws: - self.read('User') - other_ws._send(method='crud.count', client='other_tbe', params=self.models('User')) - self.assert_incoming(other_ws, client='other_tbe') - 
self.mr.count_error = True - self.update('User', client='other_client_so_everything_will_trigger') - self.assert_incoming(trigger='update', timeout=5) - - def test_jsonrpc_notifications(self): - self.read('User') - self.jsonrpc.crud.delete({'_model': 'User', 'field': 'name', 'value': 'Does Not Exist'}) - self.assert_incoming(trigger='delete') - - self.jsonrpc._prepare_request = lambda data, headers: data.update({'websocket_client': self.client}) - self.jsonrpc.crud.delete({'_model': 'User', 'field': 'name', 'value': 'Does Not Exist'}) - self.assert_no_response() diff --git a/sideboard/tests/test_websocket.py b/sideboard/tests/test_websocket.py index 6f87e30..e6bb5b6 100644 --- a/sideboard/tests/test_websocket.py +++ b/sideboard/tests/test_websocket.py @@ -18,7 +18,7 @@ def reset_stopped(): @pytest.fixture def ws(monkeypatch): ws = WebSocket(connect_immediately=False) - monkeypatch.setattr(log, 'warn', Mock()) + monkeypatch.setattr(log, 'warning', Mock()) ws._send = Mock() ws._next_id = lambda prefix: 'xxx' return ws @@ -43,7 +43,7 @@ def test_subscribe_basic(ws): 'params': ('x', 'y') } ws._send.assert_called_with(method='foo.bar', params=('x', 'y'), client='xxx') - assert not log.warn.called + assert not log.warning.called def test_subscribe_advanced(ws): @@ -62,14 +62,14 @@ def test_subscribe_advanced(ws): 'params': ('x', 'y') } ws._send.assert_called_with(method='foo.bar', params=('x', 'y'), client='yyy') - assert not log.warn.called + assert not log.warning.called def test_subscribe_error(ws): ws._send = Mock(side_effect=Exception) ws.subscribe(Mock(), 'foo.bar') assert 'xxx' in ws._callbacks - assert log.warn.called + assert log.warning.called def test_subscribe_paramback(ws): @@ -91,7 +91,7 @@ def test_subscribe_paramback(ws): 'params': (5, 6) } ws._send.assert_called_with(method='foo.bar', params=(5, 6), client='yyy') - assert not log.warn.called + assert not log.warning.called def test_unsubscribe(ws): diff --git a/sideboard/tests/test_websocket_dispatcher.py b/sideboard/tests/test_websocket_dispatcher.py index 8a32e7f..874e2af 100644 --- a/sideboard/tests/test_websocket_dispatcher.py +++ b/sideboard/tests/test_websocket_dispatcher.py @@ -21,7 +21,7 @@ def mock_wsd(): return wsd -@pytest.yield_fixture(autouse=True) +@pytest.fixture(autouse=True) def cleanup(): yield threadlocal.reset() @@ -101,11 +101,11 @@ def test_multi_broadcast(self, ws1, ws2, ws3, ws4): assert ws1.trigger.called and ws2.trigger.called and not ws3.trigger.called and not ws4.trigger.called def test_broadcast_error(self, ws4, monkeypatch): - monkeypatch.setattr(log, 'warn', Mock()) + monkeypatch.setattr(log, 'warning', Mock()) WebSocketDispatcher.broadcast('foo') - assert not ws4.trigger.called and not log.warn.called + assert not ws4.trigger.called and not log.warning.called WebSocketDispatcher.broadcast('baf') - assert ws4.trigger.called and log.warn.called and not ws4.unsubscribe_all.called + assert ws4.trigger.called and log.warning.called and not ws4.unsubscribe_all.called def test_broadcast_closed(self, ws1, ws2): ws1.is_closed = True @@ -326,7 +326,7 @@ def test_update_triggers_with_error(up): @pytest.fixture def act(wsd, monkeypatch): wsd.unsubscribe = Mock() - monkeypatch.setattr(log, 'warn', Mock()) + monkeypatch.setattr(log, 'warning', Mock()) return wsd @@ -334,19 +334,19 @@ def test_unsubscribe_action(act): act.unsubscribe = Mock() act.internal_action('unsubscribe', 'xxx', 'yyy') act.unsubscribe.assert_called_with('xxx') - assert not log.warn.called + assert not log.warning.called def 
test_unknown_action(act): act.internal_action('does_not_exist', 'xxx', 'yyy') assert not act.unsubscribe.called - assert log.warn.called + assert log.warning.called def test_no_action(act): act.internal_action(None, 'xxx', 'yyy') assert not act.unsubscribe.called - assert not log.warn.called + assert not log.warning.called @pytest.fixture diff --git a/sideboard/websockets.py b/sideboard/websockets.py index 382b0f6..ac6c4c6 100755 --- a/sideboard/websockets.py +++ b/sideboard/websockets.py @@ -401,10 +401,6 @@ def __init__(self, *args, **kwargs): session_fields: We copy session data for the currently-authenticated user who made the incoming websocket connection; by default we only copy the username, but this can be overridden in configuration. - Remember that Sideboard exposes two websocket handlers at /ws and - /wsrpc, with /ws being auth-protected (so the username field will be - meaningful) and /wsrpc being client-cert protected (so the username - will always be 'rpc'). header_fields: We copy header fields from the request that initiated the websocket connection. @@ -509,7 +505,7 @@ def broadcast(cls, channels, trigger=None, originating_client=None): try: websocket.trigger(client=client, callback=callback, trigger=trigger) except: - log.warn('ignoring unexpected trigger error', exc_info=True) + log.warning('ignoring unexpected trigger error', exc_info=True) @property def is_closed(self): @@ -568,7 +564,7 @@ def send(self, **message): debug message and then exit without error. """ if self.is_closed: - log.debug('ignoring send on an already closed websocket: {}', message) + log.debug('ignoring send on an already closed websocket: %s', message) self.unsubscribe_all() return @@ -582,7 +578,7 @@ def send(self, **message): if cached_fingerprint == fingerprint and repeat_send: return - log.debug('sending {}', message) + log.debug('sending %s', message) message = json.dumps(message, cls=sideboard.lib.serializer, separators=(',', ':'), sort_keys=True) with self.send_lock: @@ -595,7 +591,7 @@ def closed(self, code, reason=''): subscriptions, remove this websocket from the registry of instances, and log a message before closing. """ - log.info('closing: code={!r} reason={!r}', code, reason) + log.info('closing: code=%s reason=%s', code, reason) self.instances.discard(self) self.unsubscribe_all() WebSocket.closed(self, code, reason) @@ -685,7 +681,7 @@ def internal_action(self, action, client, callback): if action == 'unsubscribe': self.unsubscribe(client) elif action is not None: - log.warn('unknown action {!r}', action) + log.warning('unknown action %s', action) def clear_cached_response(self, client, callback): """ @@ -715,7 +711,7 @@ def received_message(self, message): log.error(message) self.send(error=message) else: - log.debug('received {}', fields) + log.debug('received %s', fields) responder.defer(self, fields) def handle_message(self, message): diff --git a/test_requirements.txt b/test_requirements.txt index 3c0c982..805dd9c 100644 --- a/test_requirements.txt +++ b/test_requirements.txt @@ -1,5 +1,5 @@ -pytest>=3.0.1 +pytest>=8.1.1 mock>=1.0.1,<1.1 -Sphinx>=1.2.1 -coverage>=3.6 -pep8>=1.7.0 +Sphinx>=7.2.6 +coverage>=7.4.4 +pep8>=1.7.1
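
Note on the Python 3.12 baseline (illustrative, not part of the patch): most of the `from collections import ...` edits above exist because the container ABCs were only aliased in `collections` through Python 3.9; on the new python:3.12 base image those imports raise ImportError and must come from `collections.abc`. A minimal sketch of the post-change import style — the `is_listy` helper below is a simplified stand-in for sideboard's own helper, not a copy of it:

```python
# Illustrative only -- not part of the patch. The ABC aliases in `collections`
# were removed in Python 3.10, which is why the old imports fail on python:3.12.
from collections.abc import Iterable, Mapping, Sized
from collections import defaultdict  # concrete containers still live here

def is_listy(x):
    # Simplified stand-in for sideboard's is_listy(): sized and iterable,
    # but not a string, bytes, or mapping.
    return (isinstance(x, Sized) and isinstance(x, Iterable)
            and not isinstance(x, (Mapping, str, bytes)))

counts = defaultdict(int)
for value in (['a', 'b'], 'not listy', {'k': 'v'}):
    counts[is_listy(value)] += 1

print(dict(counts))  # {True: 1, False: 2}
```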
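
Note on the logging changes (illustrative, not part of the patch): with logging_unterpolation dropped from requirements.txt, brace-style placeholders such as `{!r}` would no longer be interpolated, so the calls above move to the stdlib's lazy %-style arguments and to `warning()` instead of the deprecated `warn()` alias. A small sketch, assuming `sideboard.lib.log` ultimately delegates to a standard `logging.Logger`:

```python
# Illustrative only -- not part of the patch. Stdlib logging defers %-style
# interpolation until a record is actually emitted, so suppressed debug calls
# stay cheap, and warning() replaces the deprecated warn() alias.
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger('sideboard.example')

body = {'jsonrpc': '2.0', 'id': 1, 'method': 'sideboard.poll'}

# Not rendered at INFO level: the dict is never formatted into the message.
log.debug('jsonrpc request body: %s', body)

# Rendered lazily by the logging framework itself.
log.warning('returning error message: %s', body)

# The old '{!r}'-style placeholders only worked while logging_unterpolation
# patched the logger; without it they would appear literally in the output.
```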
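
Note on the SQLAlchemy 1.4 rewrites (illustrative, not part of the patch): pinning SQLAlchemy==1.4.52 is what motivates the `cache_ok = True` flags on the TypeDecorator subclasses and the switch to the positional `select()` / tuple-based `case()` forms seen above. A sketch of those 1.4-style constructs with a hypothetical `Account` model, not the project's actual model definitions:

```python
# Illustrative only -- not part of the patch; Account here is a hypothetical
# model, not one of the project's declarative classes.
from sqlalchemy import Column, Integer, String, case, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Account(Base):
    __tablename__ = 'account'
    id = Column(Integer, primary_key=True)
    username = Column(String)

# 1.4 style: columns are positional arguments and filters go through .where(),
# replacing the older select([columns], whereclause) form.
stmt = select(Account.username).where(Account.id == 1)

# 1.4 style: case() takes (condition, value) tuples positionally instead of a
# single list, which is the form removed outright in SQLAlchemy 2.0.
expr = case((Account.username == None, ''), else_=Account.username)  # noqa: E711

print(stmt)
print(expr)
```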