Commit f1138019 authored by Lukáš Lalinský's avatar Lukáš Lalinský

Run tests under python 3

parent d33335f3
Pipeline #20504 failed with stages
in 3 minutes and 57 seconds
/.vagrant
/.mypy_cache
/e
/venv*
/build
/acoustid.conf
/acoustid-test.conf
......@@ -9,3 +10,4 @@
*.swp
*.so
*.o
.noseids
{
"python.pythonPath": "venv3/bin/python",
"python.linting.pylintEnabled": false,
"python.linting.flake8Enabled": true,
"python.linting.flake8Path": "venv3/bin/flake8",
"python.linting.mypyEnabled": true,
"python.linting.mypyPath": "venv3/bin/mypy",
"python.linting.enabled": true,
"python.testing.nosetestsEnabled": true,
"python.testing.nosetestPath": "venv3/bin/nosetests",
"python.testing.nosetestArgs": [
"tests"
],
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": false,
}
\ No newline at end of file
env:
virtualenv -p python e
e/bin/pip install pip-tools
$(MAKE) sync-reqs
venv: venv-py2 venv-py3
update-reqs:
e/bin/pip-compile --output-file requirements.txt requirements.in
e/bin/pip-compile --output-file requirements_dev.txt requirements_dev.in
venv-py2:
virtualenv -p python2 venv2
venv2/bin/pip install pip-tools
$(MAKE) sync-reqs-py2
sync-reqs:
e/bin/pip-sync requirements.txt requirements_dev.txt
venv-py3:
virtualenv -p python3 venv3
venv3/bin/pip install pip-tools
$(MAKE) sync-reqs-py3
update-reqs: update-reqs-py2 update-reqs-py3
update-reqs-py2:
venv2/bin/pip-compile --output-file requirements_py2.txt requirements.in
venv2/bin/pip-compile --output-file requirements_dev_py2.txt requirements_dev.in
update-reqs-py3:
venv3/bin/pip-compile --output-file requirements_py3.txt requirements.in
venv3/bin/pip-compile --output-file requirements_dev_py3.txt requirements_dev.in
sync-reqs: sync-reqs-py2 sync-reqs-py3
sync-reqs-py2:
venv2/bin/pip-sync requirements_py2.txt requirements_dev_py2.txt
sync-reqs-py3:
venv3/bin/pip-sync requirements_py3.txt requirements_dev_py3.txt
lint:
tox -e flake8,mypy-py2,mypy-py3
tox -e flake8-py2,flake8-py3,mypy-py2,mypy-py3
check:
tox
.PHONY: env update-reqs sync-reqs lint check
clean:
rm -rf venv2 venv3
.PHONY: venv-py2 venv-py3 update-reqs-py2 update-reqs-py3 sync-reqs-py2 sync-reqs-py3 lint check
......@@ -32,7 +32,7 @@ port=5432
[logging]
level=WARNING
level.sqlalchemy=WARNING
syslog=yes
syslog=false
syslog_facility=local1
[index]
......
......@@ -5,6 +5,8 @@ import six
import logging
import json
import xml.etree.cElementTree as etree
from typing import Union, List, Dict, Any
from six import BytesIO
from werkzeug.wrappers import Request, Response
from acoustid.handler import Handler
from acoustid.utils import singular
......@@ -14,6 +16,7 @@ logger = logging.getLogger(__name__)
def _serialize_xml_node(parent, data):
# type: (etree.Element, Union[List[Any], Dict[str, Any], int, float, str]) -> None
if isinstance(data, dict):
_serialize_xml_dict(parent, data)
elif isinstance(data, list):
......@@ -23,7 +26,8 @@ def _serialize_xml_node(parent, data):
def _serialize_xml_dict(parent, data):
for name, value in data.items():
# type: (etree.Element, Dict[str, Any]) -> None
for name, value in sorted(data.items()):
if name.startswith('@'):
parent.attrib[name[1:]] = six.text_type(value)
else:
......@@ -32,6 +36,7 @@ def _serialize_xml_dict(parent, data):
def _serialize_xml_list(parent, data):
# type: (etree.Element, List[Any]) -> None
name = singular(parent.tag)
for item in data:
elem = etree.SubElement(parent, name)
......@@ -39,13 +44,17 @@ def _serialize_xml_list(parent, data):
def serialize_xml(data, **kwargs):
    # type: (Union[List[Any], Dict[str, Any]], **Any) -> Response
    """Serialize ``data`` into an XML HTTP response.

    The payload is rendered beneath a root ``<response>`` element and
    encoded as UTF-8 with an explicit XML declaration. Extra keyword
    arguments are forwarded to the ``Response`` constructor.
    """
    root = etree.Element('response')
    _serialize_xml_node(root, data)
    # Write through ElementTree so the output carries an XML declaration;
    # the diff had left the old etree.tostring() path as dead code after
    # a premature return, which is removed here.
    tree = etree.ElementTree(root)
    res = BytesIO()
    tree.write(res, encoding='UTF-8', xml_declaration=True)
    return Response(res.getvalue(), content_type='text/xml; charset=UTF-8', **kwargs)
def serialize_json(data, callback=None, **kwargs):
# type: (Union[List[Any], Dict[str, Any], int, float, str], str, **Any) -> Response
res = json.dumps(data, sort_keys=True)
if callback:
res = '%s(%s)' % (callback, res)
......@@ -56,6 +65,7 @@ def serialize_json(data, callback=None, **kwargs):
def serialize_response(data, format, **kwargs):
# type: (Union[List[Any], Dict[str, Any]], str, **Any) -> Response
if format == 'json':
return serialize_json(data, **kwargs)
elif format.startswith('jsonp:'):
......
......@@ -40,7 +40,7 @@ DEMO_APPLICATION_ID = 2
def iter_args_suffixes(args, *prefixes):
results = set()
for name in args.iterkeys():
for name in args.keys():
for prefix in prefixes:
if name == prefix:
results.add(None)
......@@ -117,7 +117,7 @@ class APIHandler(Handler):
if application_id is not None:
application_rate_limit = self.ctx.config.rate_limiter.applications.get(application_id)
if application_rate_limit is not None:
if self.rate_limiter.limit('app', application_id, application_rate_limit):
if self.rate_limiter.limit('app', str(application_id), application_rate_limit):
raise errors.TooManyRequests(application_rate_limit)
else:
return
......@@ -217,7 +217,7 @@ class LookupHandler(APIHandler):
el_recording = {}
res_map = {}
track_mbid_map = lookup_mbids(self.conn, self.el_result.keys())
for track_id, mbids in track_mbid_map.iteritems():
for track_id, mbids in track_mbid_map.items():
res_map[track_id] = []
for mbid, sources in mbids:
res_map[track_id].append(mbid)
......@@ -235,7 +235,7 @@ class LookupHandler(APIHandler):
def _inject_user_meta_ids_internal(self, add=True):
el_recording = {}
track_meta_map = lookup_meta_ids(self.conn, self.el_result.keys())
for track_id, meta_ids in track_meta_map.iteritems():
for track_id, meta_ids in track_meta_map.items():
for meta_id in meta_ids:
if add:
for result_el in self.el_result[track_id]:
......@@ -428,13 +428,13 @@ class LookupHandler(APIHandler):
def _group_metadata(self, metadata, track_mbid_map):
results = {}
for track_id, mbids in track_mbid_map.iteritems():
for track_id, mbids in track_mbid_map.items():
mbids = set(mbids)
results[track_id] = []
for item in metadata:
if item['recording_id'] in mbids:
results[track_id].append(item)
return results.iteritems()
return results.items()
def _group_release_groups(self, metadata, only_ids=False):
results = {}
......@@ -443,7 +443,7 @@ class LookupHandler(APIHandler):
if id not in results:
results[id] = (self.extract_release_group(item, only_id=only_ids), [])
results[id][1].append(item)
return results.itervalues()
return results.values()
def _group_recordings(self, metadata, only_ids=False):
results = {}
......@@ -452,7 +452,7 @@ class LookupHandler(APIHandler):
if id not in results:
results[id] = (self.extract_recording(item, only_id=only_ids), [])
results[id][1].append(item)
return results.itervalues()
return results.values()
def _group_releases(self, metadata, only_ids=False):
results = {}
......@@ -461,7 +461,7 @@ class LookupHandler(APIHandler):
if id not in results:
results[id] = (self.extract_release(item, only_id=only_ids), [])
results[id][1].append(item)
return results.itervalues()
return results.values()
def _group_tracks(self, metadata):
results = {}
......@@ -483,7 +483,7 @@ class LookupHandler(APIHandler):
'artists': item['track_artists'],
}
medium['tracks'].append(track)
return results.itervalues()
return results.values()
def inject_m2(self, meta):
el_recording = self._inject_recording_ids_internal(True)[0]
......@@ -707,7 +707,7 @@ class SubmitHandler(APIHandler):
'source_id': source_id
}
meta_values = dict((n, p[n] or None) for n in self.meta_fields)
if any(meta_values.itervalues()):
if any(meta_values.values()):
values['meta_id'] = insert_meta(fingerprint_db, meta_values)
if p['foreignid']:
values['foreignid_id'] = find_or_insert_foreignid(fingerprint_db, p['foreignid'])
......
......@@ -7,11 +7,13 @@ from acoustid.scripts.import_submissions import run_import
@click.group()
def cli():
    # type: () -> None
    """Top-level command group for the acoustid command-line interface."""
    pass
@cli.group()
def run():
    # type: () -> None
    """Sub-group collecting the long-running service commands (web, api, cron, import)."""
    pass
......@@ -19,6 +21,7 @@ def run():
@click.option('-c', '--config', default='acoustid.conf', envvar='ACOUSTID_CONFIG')
@click.option('-w', '--workers', type=int, envvar='ACOUSTID_WEB_WORKERS')
def run_web_cmd(config, workers):
# type: (str, int) -> None
"""Run production uWSGI with the website."""
script = Script(config)
script.setup_console_logging()
......@@ -30,6 +33,7 @@ def run_web_cmd(config, workers):
@click.option('-c', '--config', default='acoustid.conf', envvar='ACOUSTID_CONFIG')
@click.option('-w', '--workers', type=int, envvar='ACOUSTID_API_WORKERS')
def run_api_cmd(config, workers):
# type: (str, int) -> None
"""Run production uWSGI with the API."""
script = Script(config)
script.setup_console_logging()
......@@ -40,6 +44,7 @@ def run_api_cmd(config, workers):
@run.command('cron')
@click.option('-c', '--config', default='acoustid.conf', envvar='ACOUSTID_CONFIG')
def run_cron_cmd(config):
# type: (str) -> None
"""Run cron."""
script = Script(config)
script.setup_console_logging()
......@@ -50,6 +55,7 @@ def run_cron_cmd(config):
@run.command('import')
@click.option('-c', '--config', default='acoustid.conf', envvar='ACOUSTID_CONFIG')
def run_import_cmd(config):
# type: (str) -> None
"""Run import."""
script = Script(config)
script.setup_console_logging()
......@@ -60,6 +66,7 @@ def run_import_cmd(config):
@cli.command('shell')
@click.option('-c', '--config', default='acoustid.conf', envvar='ACOUSTID_CONFIG')
def shell_cmd(config):
# type: (str) -> None
"""Run shell."""
import IPython
script = Script(config)
......@@ -71,4 +78,5 @@ def shell_cmd(config):
def main():
    # type: () -> None
    """Console entry point: dispatch to the click command group."""
    cli()
......@@ -3,8 +3,10 @@
import os.path
import logging
from six.moves import configparser as ConfigParser
from typing import Any, Callable, Dict, List, Optional
from six.moves.configparser import RawConfigParser
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.engine.url import URL
from acoustid.const import DEFAULT_GLOBAL_RATE_LIMIT
......@@ -13,10 +15,12 @@ logger = logging.getLogger(__name__)
def str_to_bool(x):
    # type: (str) -> bool
    """Interpret a configuration string as a boolean flag.

    Only '1', 'on' and 'true' (compared case-insensitively) count as
    truthy; every other value, including 'yes', is falsy.
    """
    truthy_values = ('1', 'on', 'true')
    return x.lower() in truthy_values
def read_env_item(obj, key, name, convert=None):
# type: (Any, str, str, Callable[[str], Any]) -> None
value = None
if name in os.environ:
value = os.environ[name]
......@@ -33,19 +37,23 @@ def read_env_item(obj, key, name, convert=None):
class BaseConfig(object):
    """Common base class for configuration sections.

    Subclasses override ``read_section`` and ``read_env``; ``read`` only
    delegates to ``read_section`` when the requested section is actually
    present in the parser.
    """

    def read(self, parser, section):
        # type: (RawConfigParser, str) -> None
        """Read ``section`` from ``parser`` if the section exists."""
        if not parser.has_section(section):
            return
        self.read_section(parser, section)

    def read_section(self, parser, section):
        # type: (RawConfigParser, str) -> None
        """Parse one configuration section; no-op in the base class."""
        pass

    def read_env(self, prefix):
        # type: (str) -> None
        """Read settings from environment variables; no-op in the base class."""
        pass
class DatabasesConfig(BaseConfig):
def __init__(self):
# type: () -> None
self.databases = {
'app': DatabaseConfig(),
'fingerprint': DatabaseConfig(),
......@@ -55,7 +63,8 @@ class DatabasesConfig(BaseConfig):
self.use_two_phase_commit = False
def create_engines(self, **kwargs):
engines = {}
# type: (**Any) -> Dict[str, Engine]
engines = {} # type: Dict[str, Engine]
for name, db_config in self.databases.items():
for other_name, other_db_config in self.databases.items():
if other_name in engines and other_db_config == db_config:
......@@ -66,6 +75,7 @@ class DatabasesConfig(BaseConfig):
return engines
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'two_phase_commit'):
self.use_two_phase_commit = parser.getboolean(section, 'two_phase_commit')
for name, sub_config in self.databases.items():
......@@ -73,6 +83,7 @@ class DatabasesConfig(BaseConfig):
sub_config.read_section(parser, sub_section)
def read_env(self, prefix):
# type: (str) -> None
read_env_item(self, 'use_two_phase_commit', prefix + 'DATABASE_TWO_PHASE_COMMIT', convert=str_to_bool)
for name, sub_config in self.databases.items():
sub_prefix = prefix + 'DATABASE_' + name.upper() + '_'
......@@ -82,31 +93,36 @@ class DatabasesConfig(BaseConfig):
class DatabaseConfig(BaseConfig):
def __init__(self):
    # type: () -> None
    """Initialize connection settings with local-development defaults.

    The diff interleave had left the old ``None`` initializers as dead
    assignments that were immediately overwritten; only the effective
    final values are kept here.
    """
    self.user = 'acoustid'
    self.superuser = 'postgres'
    self.name = 'acoustid'
    self.host = 'localhost'
    self.port = 5432
    self.password = ''
    # Pool tuning knobs stay unset unless configured explicitly.
    self.pool_size = None  # type: Optional[int]
    self.pool_recycle = None  # type: Optional[int]
    self.pool_pre_ping = None  # type: Optional[bool]
def __eq__(self, other):
    # type: (object) -> bool
    """Two configs are equal when every connection setting matches.

    NOTE: joining the comparisons into a tuple (as the pre-commit code
    did) is always truthy; the comparisons must be combined with ``and``.
    """
    if not isinstance(other, DatabaseConfig):
        return False
    return (
        self.user == other.user and
        self.superuser == other.superuser and
        self.name == other.name and
        self.host == other.host and
        self.port == other.port and
        self.password == other.password and
        self.pool_size == other.pool_size and
        self.pool_recycle == other.pool_recycle and
        self.pool_pre_ping == other.pool_pre_ping
    )
def create_url(self, superuser=False):
kwargs = {}
# type: (bool) -> URL
kwargs = {} # type: Dict[str, Any]
if superuser:
kwargs['username'] = self.superuser
else:
......@@ -121,6 +137,7 @@ class DatabaseConfig(BaseConfig):
return URL('postgresql', **kwargs)
def create_engine(self, superuser=False, **kwargs):
# type: (bool, **Any) -> Engine
if self.pool_size is not None and 'pool_size' not in kwargs:
kwargs['pool_size'] = self.pool_size
if self.pool_recycle is not None and 'pool_recycle' not in kwargs:
......@@ -130,6 +147,7 @@ class DatabaseConfig(BaseConfig):
return create_engine(self.create_url(superuser=superuser), **kwargs)
def create_psql_args(self, superuser=False):
# type: (bool) -> List[str]
args = []
if superuser:
args.append('-U')
......@@ -147,6 +165,7 @@ class DatabaseConfig(BaseConfig):
return args
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
self.user = parser.get(section, 'user')
self.name = parser.get(section, 'name')
if parser.has_option(section, 'host'):
......@@ -176,10 +195,12 @@ class DatabaseConfig(BaseConfig):
class IndexConfig(BaseConfig):
def __init__(self):
    # type: () -> None
    # Default to a local index server on its conventional port.
    self.host = '127.0.0.1'
    self.port = 6080
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'host'):
self.host = parser.get(section, 'host')
if parser.has_option(section, 'port'):
......@@ -197,6 +218,7 @@ class RedisConfig(BaseConfig):
self.port = 6379
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'host'):
self.host = parser.get(section, 'host')
if parser.has_option(section, 'port'):
......@@ -225,6 +247,7 @@ class LoggingConfig(BaseConfig):
self.syslog_facility = None
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
level_names = get_logging_level_names()
for name in parser.options(section):
if name == 'level':
......@@ -254,6 +277,7 @@ class WebSiteConfig(BaseConfig):
self.shutdown_file_path = '/tmp/acoustid-server-shutdown.txt'
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'debug'):
self.debug = parser.getboolean(section, 'debug')
self.secret = parser.get(section, 'secret')
......@@ -292,6 +316,7 @@ class uWSGIConfig(BaseConfig):
self.offload_threads = 1
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'harakiri'):
self.harakiri = parser.getint(section, 'harakiri')
if parser.has_option(section, 'http_timeout'):
......@@ -325,6 +350,7 @@ class SentryConfig(BaseConfig):
self.script_dsn = ''
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'web_dsn'):
self.web_dsn = parser.get(section, 'web_dsn')
if parser.has_option(section, 'api_dsn'):
......@@ -345,6 +371,7 @@ class ReplicationConfig(BaseConfig):
self.import_acoustid_musicbrainz = None
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'import_acoustid'):
self.import_acoustid = parser.get(section, 'import_acoustid')
if parser.has_option(section, 'import_acoustid_musicbrainz'):
......@@ -362,6 +389,7 @@ class ClusterConfig(BaseConfig):
self.secret = None
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
if parser.has_option(section, 'role'):
self.role = parser.get(section, 'role')
if parser.has_option(section, 'base_master_url'):
......@@ -383,6 +411,7 @@ class RateLimiterConfig(BaseConfig):
self.applications = {}
def read_section(self, parser, section):
# type: (RawConfigParser, str) -> None
for name in parser.options(section):
if name == 'global':
self.global_rate_limit = parser.getfloat(section, name)
......@@ -410,8 +439,9 @@ class Config(object):
self.uwsgi = uWSGIConfig()
def read(self, path):
# type: (str) -> None
logger.info("Loading configuration file %s", path)
parser = ConfigParser.RawConfigParser()
parser = RawConfigParser()
parser.read(path)
self.databases.read(parser, 'database')
self.logging.read(parser, 'logging')
......@@ -425,6 +455,7 @@ class Config(object):
self.uwsgi.read(parser, 'uwsgi')
def read_env(self, tests=False):
# type: (bool) -> None
if tests:
prefix = 'ACOUSTID_TEST_'
else:
......
......@@ -4,7 +4,9 @@
import time
import logging
import functools
from typing import Callable, Any
from schedule import Scheduler
from acoustid.script import Script
from acoustid.scripts.update_stats import run_update_stats
from acoustid.scripts.update_lookup_stats import run_update_lookup_stats
from acoustid.scripts.update_user_agent_stats import run_update_user_agent_stats
......@@ -15,8 +17,10 @@ logger = logging.getLogger(__name__)
def create_schedule(script):
# type: (Script) -> Scheduler
def wrap_job(func):
# type: (Callable[[Script, Any, Any], None]) -> Callable[[], None]
@functools.wraps(func)
def wrapper():
logger.info('Running %s', func.__name__)
......@@ -35,6 +39,7 @@ def create_schedule(script):
def run_cron(script):
# type: (Script) -> None
schedule = create_schedule(script)
while True:
schedule.run_pending()
......
......@@ -128,7 +128,7 @@ def merge_missing_mbids(conn):
merge = {}
for old_mbid, new_mbid in results:
merge.setdefault(str(new_mbid), []).append(str(old_mbid))
for new_mbid, old_mbids in merge.iteritems():
for new_mbid, old_mbids in merge.items():
merge_mbids(conn, new_mbid, old_mbids)
......
import typing
from typing import Dict, Any, NewType, TYPE_CHECKING
from sqlalchemy.orm import sessionmaker
from sqlalchemy.engine import Connection
from sqlalchemy.engine import Engine, Connection
from acoustid.tables import metadata
if TYPE_CHECKING:
from acoustid.script import Script
Session = sessionmaker()
AppDB = typing.NewType('AppDB', Connection)
FingerprintDB = typing.NewType('FingerprintDB', Connection)
IngestDB = typing.NewType('IngestDB', Connection)
MusicBrainzDB = typing.NewType('MusicBrainzDB', Connection)
AppDB = NewType('AppDB', Connection)
FingerprintDB = NewType('FingerprintDB', Connection)
IngestDB = NewType('IngestDB', Connection)
MusicBrainzDB = NewType('MusicBrainzDB', Connection)
def get_bind_args(engines):
    # type: (Dict[str, Engine]) -> Dict[str, Any]
    """Compute sessionmaker bind arguments from the named engines.

    Tables are routed by their ``bind_key`` table-info entry; tables
    without one (or with no ``info`` mapping at all) use the default
    'app' engine, so only non-default tables appear in ``binds``.
    """
    binds = {}
    default_bind_key = 'app'
    for table in metadata.sorted_tables:
        # table.info may be empty/None, so guard before looking up the key.
        bind_key = table.info.get('bind_key', default_bind_key) if table.info else default_bind_key
        if bind_key != default_bind_key:
            binds[table] = engines[bind_key]
    return {'bind': engines[default_bind_key], 'binds': binds}
def get_session_args(script):
    # type: (Script) -> Dict[str, Any]
    """Build the keyword arguments used to open a database ``Session``.

    Combines the two-phase-commit flag from the script's configuration
    with the engine bindings computed by ``get_bind_args``.
    """
    session_args = dict(get_bind_args(script.db_engines))
    session_args['twophase'] = script.config.databases.use_two_phase_commit
    return session_args
......@@ -31,10 +36,12 @@ def get_session_args(script):
class DatabaseContext(object):
def __init__(self, script):
    # type: (Script) -> None
    """Create a database context sharing the script's engines and open a session."""
    self.engines = script.db_engines
    self.session = Session(**get_session_args(script))
def connection(self, bind_key):
    # type: (str) -> Connection
    """Return a session-scoped connection for the engine named ``bind_key``."""
    return self.session.connection(bind=self.engines[bind_key])
def get_app_db(self):
......@@ -50,10 +57,13 @@ class DatabaseContext(object):
return IngestDB(self.connection('ingest'))
def close(self):
    # type: () -> None
    """Close the underlying session, releasing its connections."""
    self.session.close()
def __enter__(self):
    # type: () -> DatabaseContext
    """Enter a ``with`` block; returns the context itself."""
    return self
def __exit__(self, exc_type, exc_val, exc_tb):
    # type: (Any, Any, Any) -> None
    """Close the session when the ``with`` block exits (exception info is ignored)."""
    self.close()
......@@ -11,7 +11,7 @@ from collections import namedtuple, deque
logger = logging.getLogger(__name__)
CRLF = '\r\n'
CRLF = b'\r\n'
def encode_fp(data):
......@@ -55,7 +55,7 @@ class IndexClient(Index):
self.in_transaction = False
self.created = time.time()
self.sock = None
self._buffer = ''
self._buffer = b''
self._connect()
def __str__(self):
......@@ -79,7 +79,8 @@ class IndexClient(Index):
raise IndexClientError('unable to connect to the index server at %s:%s' % (self.host, self.port))
def _putline(self, line):
    # type: (str) -> None
    """Send one protocol line, UTF-8 encoded and CRLF-terminated.

    The diff interleave had kept both the old str-based ``sendall`` and
    the new bytes-based one; only the Python 3 compatible bytes form is
    kept (CRLF is a bytes constant at module level).
    """
    self.sock.sendall(b'%s%s' % (line.encode('utf8'), CRLF))
def _getline(self, timeout=None):
pos = self._buffer.find(CRLF)
......@@ -113,7 +114,7 @@ class IndexClient(Index):
raise IndexClientError("read timeout exceeded")
line = self._buffer[:pos]
self._buffer = self._buffer[pos + len(CRLF):]
return line
return line.decode('utf8')
def _request(self, request, timeout=None):
self._putline(request)
......@@ -168,7 +169,7 @@ class IndexClient(Index):
self.rollback()
self._putline('quit')
self.sock.close()
except StandardError:
except Exception:
logger.exception("Error while closing connection %s", self)
self.sock = None
......
# Copyright (C) 2013 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from __future__ import division
import time
import logging
from redis import Redis
logger = logging.getLogger(__name__)
......@@ -10,17 +12,19 @@ logger = logging.getLogger(__name__)
class RateLimiter(object):
def __init__(self, redis, prefix, interval=20, steps=4):
    # type: (Redis, str, int, int) -> None
    """Sliding-window rate limiter backed by Redis counters.

    The window of ``interval`` seconds is divided into ``steps``
    sub-buckets; ``prefix`` namespaces the Redis keys.
    """
    self.redis = redis
    self.prefix = prefix
    self.interval = interval
    self.steps = steps
def limit(self, bucket, key, rate):
# type: (str, str, float) -> bool
ts = int(self.steps * time.time() / self.interval)
full_key = '%s:%s:%s:%s' % (self.prefix, bucket, key, ts)
count = self.redis.incr(full_key)
self.redis.expire(full_key, (self.steps + 1) * self.interval / self.steps)
self.redis.expire(full_key, (self.steps + 1) * self.interval // self.steps)
for i in range(1, self.steps):
full_key_i = '%s:%s:%s:%s' % (self.prefix, bucket, key, ts - i)
......@@ -31,5 +35,5 @@ class RateLimiter(object):
logger.info("Key %s:%s exceeded the rate limit of %s requests per %s seconds", bucket, key, rate * self.interval, self.interval)
return True
logger.debug("Key %s:%s had %s requests in the last %s seconds (rate %f)", bucket, key, count, self.interval, float(count) / self.interval)
logger.debug("Key %s:%s had %s requests in the last %s seconds (rate %f)", bucket, key,