...
 
Commits (10)
......@@ -20,12 +20,19 @@ test:
- redis
- name: quay.io/acoustid/postgresql:master
alias: postgres
- name: quay.io/acoustid/acoustid-index:master
alias: acoustid-index
variables:
POSTGRES_USER: acoustid
POSTGRES_PASSWORD: acoustid
POSTGRES_DB: acoustid_test
ACOUSTID_TEST_POSTGRES_HOST: postgres
ACOUSTID_TEST_DB_TWO_PHASE_COMMIT: 0
ACOUSTID_TEST_DB_DEFAULT_HOST: postgres
ACOUSTID_TEST_DB_DEFAULT_NAME: acoustid_test
ACOUSTID_TEST_DB_SLOW_HOST: postgres
ACOUSTID_TEST_DB_SLOW_NAME: acoustid_test
ACOUSTID_TEST_REDIS_HOST: redis
ACOUSTID_TEST_INDEX_HOST: acoustid-index
cache:
key: $CI_JOB_NAME-$CI_COMMIT_REF_SLUG
paths:
......
[database]
two_phase_commit=yes
[database:default]
host=127.0.0.1
port=5432
user=acoustid
name=acoustid_test
password=acoustid
[database:slow]
host=127.0.0.1
port=5432
user=acoustid
name=acoustid_slow_test
password=acoustid
[logging]
level=WARNING
level.sqlalchemy=WARNING
syslog=yes
syslog_facility=local1
[index]
host=127.0.0.1
port=6080
[redis]
host=127.0.0.1
port=6379
......
[database]
name=acoustid
user=acoustid
password=XXX
superuser=postgres
host=localhost
port=5432
[database_slow]
name=acoustid
user=acoustid
password=XXX
host=localhost
port=5432
......
This diff is collapsed.
......@@ -18,8 +18,10 @@ def read_env_item(obj, key, name, convert=None):
value = None
if name in os.environ:
value = os.environ[name]
if name + '_FILE' in os.environ:
logger.info('Reading config value from environment variable %s', name)
elif name + '_FILE' in os.environ:
value = open(os.environ[name + '_FILE']).read().strip()
logger.info('Reading config value from environment variable %s', name + '_FILE')
if value is not None:
if convert is not None:
value = convert(value)
......@@ -39,6 +41,35 @@ class BaseConfig(object):
pass
class DatabasesConfig(BaseConfig):
    """Configuration for the named set of logical databases.

    Holds one DatabaseConfig per logical database ('default' and 'slow')
    plus the shared two-phase-commit flag, and knows how to populate them
    from a parsed config file or from environment variables.
    """

    def __init__(self):
        # The dict keys double as config-file section suffixes
        # ('database:default') and as env-var name components.
        self.databases = {
            'default': DatabaseConfig(),
            'slow': DatabaseConfig(),
        }
        self.use_two_phase_commit = False

    def create_engines(self, **kwargs):
        """Create one SQLAlchemy engine per configured database.

        Returns a dict mapping database name to engine; kwargs are
        forwarded to each DatabaseConfig.create_engine() call.
        """
        return {
            db_name: db_conf.create_engine(**kwargs)
            for db_name, db_conf in self.databases.items()
        }

    def read_section(self, parser, section):
        """Read the shared flag from `section`, then each per-database sub-section."""
        if parser.has_option(section, 'two_phase_commit'):
            self.use_two_phase_commit = parser.getboolean(section, 'two_phase_commit')
        for db_name, db_conf in self.databases.items():
            db_conf.read_section(parser, '{}:{}'.format(section, db_name))

    def read_env(self, prefix):
        """Read settings from environment variables starting with `prefix`."""
        read_env_item(self, 'use_two_phase_commit', prefix + 'TWO_PHASE_COMMIT', convert=str_to_bool)
        for db_name, db_conf in self.databases.items():
            db_conf.read_env(prefix + db_name.upper() + '_')
class DatabaseConfig(BaseConfig):
def __init__(self):
......@@ -103,21 +134,21 @@ class DatabaseConfig(BaseConfig):
if parser.has_option(section, 'password'):
self.password = parser.get(section, 'password')
if parser.has_option(section, 'pool_size'):
self.password = parser.getint(section, 'pool_size')
self.pool_size = parser.getint(section, 'pool_size')
if parser.has_option(section, 'pool_recycle'):
self.password = parser.getint(section, 'pool_recycle')
self.pool_recycle = parser.getint(section, 'pool_recycle')
if parser.has_option(section, 'pool_pre_ping'):
self.password = parser.getboolean(section, 'pool_pre_ping')
self.pool_pre_ping = parser.getboolean(section, 'pool_pre_ping')
def read_env(self, prefix):
    """Read this database's settings from environment variables.

    `prefix` already includes the logical database name (e.g.
    ACOUSTID_TEST_DB_DEFAULT_ as built by DatabasesConfig.read_env), so
    only the setting name is appended here.
    """
    # NOTE: the old pre-refactor calls using POSTGRES_*-style names were
    # leftover diff residue and have been removed; keeping both sets would
    # read each setting twice under conflicting variable names.
    read_env_item(self, 'name', prefix + 'NAME')
    read_env_item(self, 'host', prefix + 'HOST')
    read_env_item(self, 'port', prefix + 'PORT', convert=int)
    read_env_item(self, 'user', prefix + 'USER')
    read_env_item(self, 'password', prefix + 'PASSWORD')
    read_env_item(self, 'pool_size', prefix + 'POOL_SIZE', convert=int)
    read_env_item(self, 'pool_recycle', prefix + 'POOL_RECYCLE', convert=int)
    read_env_item(self, 'pool_pre_ping', prefix + 'POOL_PRE_PING', convert=str_to_bool)
class IndexConfig(BaseConfig):
......@@ -342,7 +373,7 @@ class RateLimiterConfig(BaseConfig):
class Config(object):
def __init__(self):
self.database = DatabaseConfig()
self.databases = DatabasesConfig()
self.logging = LoggingConfig()
self.website = WebSiteConfig()
self.index = IndexConfig()
......@@ -357,7 +388,7 @@ class Config(object):
logger.info("Loading configuration file %s", path)
parser = ConfigParser.RawConfigParser()
parser.read(path)
self.database.read(parser, 'database')
self.databases.read(parser, 'database')
self.logging.read(parser, 'logging')
self.website.read(parser, 'website')
self.index.read(parser, 'index')
......@@ -373,7 +404,7 @@ class Config(object):
prefix = 'ACOUSTID_TEST_'
else:
prefix = 'ACOUSTID_'
self.database.read_env(prefix)
self.databases.read_env(prefix + 'DB_')
self.logging.read_env(prefix)
self.website.read_env(prefix)
self.index.read_env(prefix)
......
......@@ -21,3 +21,6 @@ FINGERPRINT_MAX_LENGTH_DIFF = 7
FINGERPRINT_MAX_ALLOWED_LENGTH_DIFF = 30
MAX_REQUESTS_PER_SECOND = 3
MAX_FOREIGNID_NAMESPACE_LENGTH = 10
MAX_FOREIGNID_VALUE_LENGTH = 64
......@@ -28,6 +28,7 @@ def insert_submission(conn, data):
'source_id': data.get('source_id'),
'format_id': data.get('format_id'),
'meta_id': data.get('meta_id'),
'foreignid': data.get('foreignid'),
'foreignid_id': data.get('foreignid_id'),
})
id = conn.execute(insert_stmt).inserted_primary_key[0]
......
from sqlalchemy.orm import sessionmaker
from acoustid.tables import metadata
Session = sessionmaker()
def get_bind_args(engines):
    """Build sessionmaker bind arguments for the multi-database setup.

    Tables whose table.info['bind_key'] names a non-default database are
    routed to the matching engine via 'binds'; all other tables use the
    default engine.
    """
    table_binds = {
        table: engines[table.info.get('bind_key', 'default')]
        for table in metadata.sorted_tables
        if table.info.get('bind_key', 'default') != 'default'
    }
    return {'bind': engines['default'], 'binds': table_binds}
def get_session_args(script):
    """Return keyword arguments for creating a Session from a Script.

    Combines the per-table engine bindings with the configured
    two-phase-commit flag.
    """
    session_args = dict(get_bind_args(script.db_engines))
    session_args['twophase'] = script.config.databases.use_two_phase_commit
    return session_args
class DatabaseContext(object):
def __init__(self, bind):
self.session = Session(bind=bind)
def __init__(self, script):
self.session = Session(**get_session_args(script))
def __enter__(self):
return self
......
......@@ -79,3 +79,11 @@ class StatsLookups(Base):
__table__ = tables.stats_lookups
application = relationship('Application')
class Submission(Base):
    # ORM mapping for the new submission table; the table is declared with
    # info={'bind_key': 'slow'} so the session routes it to the 'slow' database.
    __table__ = tables.submission
class SubmissionResult(Base):
    # ORM mapping for the submission_result table (also bound to the 'slow'
    # database via its bind_key).
    __table__ = tables.submission_result
......@@ -11,11 +11,26 @@ from optparse import OptionParser
from acoustid.config import Config
from acoustid.indexclient import IndexClientPool
from acoustid.utils import LocalSysLogHandler
from acoustid.db import DatabaseContext
from acoustid._release import GIT_RELEASE
logger = logging.getLogger(__name__)
class ScriptContext(object):
    """Bundle of per-run service handles for scripts.

    Carries a database session plus the shared redis and index clients
    (either may be None). Usable as a context manager: on exit only the
    database session is closed; the other handles are left untouched.
    """

    def __init__(self, db, redis, index):
        # db: SQLAlchemy session; redis / index: shared client objects.
        self.db, self.redis, self.index = db, redis, index

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # The session is the only resource owned by this context.
        self.db.close()
class Script(object):
def __init__(self, config_path, tests=False):
......@@ -23,25 +38,30 @@ class Script(object):
if config_path:
self.config.read(config_path)
self.config.read_env(tests=tests)
if tests:
self.engine = sqlalchemy.create_engine(self.config.database.create_url(),
poolclass=sqlalchemy.pool.AssertionPool)
else:
self.engine = sqlalchemy.create_engine(self.config.database.create_url())
create_engine_kwargs = {'poolclass': sqlalchemy.pool.AssertionPool} if tests else {}
self.db_engines = self.config.databases.create_engines(**create_engine_kwargs)
if not self.config.index.host:
self.index = None
else:
self.index = IndexClientPool(host=self.config.index.host,
port=self.config.index.port,
recycle=60)
if not self.config.redis.host:
self.redis = None
else:
self.redis = Redis(host=self.config.redis.host,
port=self.config.redis.port)
self._console_logging_configured = False
self.setup_logging()
@property
def engine(self):
return self.db_engines['default']
def setup_logging(self):
for logger_name, level in sorted(self.config.logging.levels.items()):
logging.getLogger(logger_name).setLevel(level)
......@@ -66,6 +86,10 @@ class Script(object):
def setup_sentry(self):
sentry_sdk.init(self.config.sentry.script_dsn, release=GIT_RELEASE)
def context(self):
db = DatabaseContext(self).session
return ScriptContext(db=db, redis=self.redis, index=self.index)
def run_script(func, option_cb=None, master_only=False):
parser = OptionParser()
......
......@@ -9,7 +9,7 @@ from cStringIO import StringIO
from werkzeug.exceptions import HTTPException
from werkzeug.routing import Map, Rule, Submount
from werkzeug.wrappers import Request
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.middleware.proxy_fix import ProxyFix
from acoustid.script import Script
from acoustid._release import GIT_RELEASE
import acoustid.api.v1
......@@ -53,12 +53,15 @@ admin_url_rules = [
class Server(Script):
def __init__(self, config_path):
super(Server, self).__init__(config_path)
def __init__(self, config_path, **kwargs):
super(Server, self).__init__(config_path, **kwargs)
url_rules = api_url_rules + admin_url_rules
self.url_map = Map(url_rules, strict_slashes=False)
def __call__(self, environ, start_response):
return self.wsgi_app(environ, start_response)
def wsgi_app(self, environ, start_response):
urls = self.url_map.bind_to_environ(environ)
handler = None
try:
......@@ -112,16 +115,16 @@ def add_cors_headers(app):
return wrapped_app
def make_application(config_path):
def make_application(config_path, **kwargs):
"""Construct a WSGI application for the AcoustID server
:param config_path: path to the server configuration file
"""
server = Server(config_path)
server = Server(config_path, **kwargs)
server.setup_sentry()
app = GzipRequestMiddleware(server)
app = ProxyFix(app)
app = SentryWsgiMiddleware(app)
app = replace_double_slashes(app)
app = add_cors_headers(app)
return server, app
server.wsgi_app = GzipRequestMiddleware(server.wsgi_app)
server.wsgi_app = ProxyFix(server.wsgi_app)
server.wsgi_app = SentryWsgiMiddleware(server.wsgi_app)
server.wsgi_app = replace_double_slashes(server.wsgi_app)
server.wsgi_app = add_cors_headers(server.wsgi_app)
return server
import sqlalchemy
import sqlalchemy.event
from sqlalchemy import (
MetaData, Table, Column, Index,
MetaData, Table, Column, Index, Sequence,
ForeignKey, CheckConstraint,
Integer, String, DateTime, Boolean, Date, Text, SmallInteger, BigInteger, CHAR,
DDL, sql,
......@@ -103,7 +104,7 @@ source = Table('source', metadata,
Index('source_idx_uniq', 'application_id', 'account_id', 'version', unique=True),
)
submission = Table('submission', metadata,
submission_old = Table('submission_old', metadata,
Column('id', Integer, primary_key=True),
Column('fingerprint', ARRAY(Integer), nullable=False),
Column('length', SmallInteger, CheckConstraint('length>0'), nullable=False),
......@@ -118,7 +119,55 @@ submission = Table('submission', metadata,
Column('foreignid_id', Integer, ForeignKey('foreignid.id')),
)
Index('submission_idx_handled', submission.c.id, postgresql_where=submission.c.handled == False) # noqa: E712
Index('submission_idx_handled', submission_old.c.id, postgresql_where=submission_old.c.handled == False) # noqa: E712
# Explicit sequence for submission ids: the migration creates it with
# CreateSequence and the id column pulls values via server_default.
submission_id_seq = Sequence('submission_id_seq', metadata=metadata)

# New submission table, bound to the 'slow' database (see info below).
# The account/application/etc. references are plain integers because the
# referenced tables live on the 'default' database, so real foreign keys
# cannot span the two — presumably enforced at the application level
# (TODO confirm).
submission = Table('submission', metadata,
    Column('id', Integer, submission_id_seq, server_default=submission_id_seq.next_value(), primary_key=True),
    Column('created', DateTime(timezone=True), server_default=sql.func.current_timestamp(), nullable=False),
    Column('handled', Boolean, default=False, server_default=sql.false()),
    Column('account_id', Integer, nullable=False),  # ForeignKey('account.id')
    Column('application_id', Integer, nullable=False),  # ForeignKey('application.id')
    Column('application_version', String),
    Column('fingerprint', ARRAY(Integer), nullable=False),
    Column('duration', Integer, CheckConstraint('duration>0'), nullable=False),
    Column('bitrate', Integer, CheckConstraint('bitrate>0')),
    Column('format', String),
    Column('mbid', UUID),
    Column('puid', UUID),
    Column('foreignid', String),
    Column('track', String),
    Column('artist', String),
    Column('album', String),
    Column('album_artist', String),
    Column('track_no', Integer),
    Column('disc_no', Integer),
    Column('year', Integer),
    # Routes this table to the 'slow' engine via get_bind_args().
    info={'bind_key': 'slow'},
)
# Result record written once a submission has been imported; keyed by the
# originating submission's id (no autoincrement). Lives on the 'slow'
# database; cross-database references stay as plain integers.
submission_result = Table('submission_result', metadata,
    Column('submission_id', Integer, primary_key=True, autoincrement=False),
    Column('created', DateTime(timezone=True), server_default=sql.func.current_timestamp(), nullable=False),
    Column('account_id', Integer, nullable=False),  # ForeignKey('account.id')
    Column('application_id', Integer, nullable=False),  # ForeignKey('application.id')
    Column('application_version', String),
    Column('fingerprint_id', Integer, nullable=False),  # ForeignKey('fingerprint.id')
    Column('track_id', Integer, nullable=False),  # ForeignKey('track.id')
    Column('meta_id', Integer),  # ForeignKey('meta.id')
    Column('mbid', UUID),
    Column('puid', UUID),
    Column('foreignid', String),
    # Routes this table to the 'slow' engine via get_bind_args().
    info={'bind_key': 'slow'},
)
stats = Table('stats', metadata,
Column('id', Integer, primary_key=True),
......@@ -193,6 +242,8 @@ fingerprint = Table('fingerprint', metadata,
Index('fingerprint_idx_track_id', 'track_id'),
)
fingerprint.add_is_dependent_on(track)
fingerprint_source = Table('fingerprint_source', metadata,
Column('id', Integer, primary_key=True),
Column('fingerprint_id', Integer, ForeignKey('fingerprint.id'), nullable=False),
......
......@@ -9,6 +9,7 @@ import datetime
import hmac
import base64
import six
from acoustid.const import MAX_FOREIGNID_NAMESPACE_LENGTH, MAX_FOREIGNID_VALUE_LENGTH
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urlencode
from logging import Handler
......@@ -54,7 +55,15 @@ def is_int(s):
def is_foreignid(s):
    """Check whether `s` is a valid foreign ID of the form 'namespace:value'.

    The namespace must be lowercase alphanumeric; both parts are limited by
    MAX_FOREIGNID_NAMESPACE_LENGTH / MAX_FOREIGNID_VALUE_LENGTH.
    Returns True only when the format and both length limits hold.
    """
    # BUGFIX: the leftover pre-refactor one-liner
    # `return bool(re.match(r'^[0-9a-z]+:.+$', s))` made all of the
    # validation below unreachable; it has been removed.
    match = re.match(r'^([0-9a-z]+):(.+)$', s)
    if match is None:
        return False
    namespace, value = match.groups()
    if len(namespace) > MAX_FOREIGNID_NAMESPACE_LENGTH:
        return False
    if len(value) > MAX_FOREIGNID_VALUE_LENGTH:
        return False
    return True
def singular(plural):
......
......@@ -5,9 +5,10 @@ import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
from flask import Flask, request, session
from flask.sessions import SecureCookieSessionInterface
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.middleware.proxy_fix import ProxyFix
from sqlalchemy.orm import scoped_session
from acoustid.script import Script
from acoustid.db import get_session_args
from acoustid.web import db
from acoustid.web.views.general import general_page
from acoustid.web.views.user import user_page
......@@ -36,6 +37,8 @@ def make_application(config_filename=None, tests=False):
GOOGLE_OAUTH_CLIENT_ID=config.website.google_oauth_client_id,
GOOGLE_OAUTH_CLIENT_SECRET=config.website.google_oauth_client_secret,
)
if tests:
app.config['TESTING'] = True
app.acoustid_script = script
app.acoustid_config = config
......@@ -88,7 +91,7 @@ def make_application(config_filename=None, tests=False):
from acoustid.api import get_health_response
return get_health_response(script, request)
db.session_factory.configure(bind=config.database.create_engine())
db.session_factory.configure(**get_session_args(script))
db.session = scoped_session(db.session_factory, scopefunc=get_flask_request_scope)
app.register_blueprint(general_page)
......
......@@ -33,7 +33,7 @@ def render_page(name, **context):
text = file.read().decode('utf8')
text = render_template_string(text, **context)
md = Markdown(extensions=['meta'])
md.treeprocessors["flask_links"] = MarkdownFlaskUrlProcessor(md)
md.treeprocessors.register(MarkdownFlaskUrlProcessor(md), 'flask_links', 50)
html = md.convert(text)
title = ' '.join(md.Meta.get('title', []))
return render_template('page.html', content=html, title=title)
......
......@@ -17,7 +17,7 @@ fi
echo "GIT_RELEASE = '$GIT_RELEASE'" > acoustid/_release.py
docker pull $IMAGE:$PREV_VERSION
docker pull $IMAGE:$PREV_VERSION || true
docker build --cache-from=$IMAGE:$PREV_VERSION -t $IMAGE:$VERSION .
docker push $IMAGE:$VERSION
......
CREATE DATABASE "acoustid";
CREATE DATABASE "acoustid_test";
CREATE DATABASE "acoustid_slow";
CREATE DATABASE "acoustid_slow_test";
\c acoustid
create extension intarray;
create extension pgcrypto;
......@@ -12,3 +15,15 @@ create extension intarray;
create extension pgcrypto;
create extension acoustid;
create extension cube;
\c acoustid_slow
create extension intarray;
create extension pgcrypto;
create extension acoustid;
create extension cube;
\c acoustid_slow_test
create extension intarray;
create extension pgcrypto;
create extension acoustid;
create extension cube;
......@@ -2,8 +2,10 @@
set -ex
/wait-for-it.sh $ACOUSTID_TEST_INDEX_HOST:6080
/wait-for-it.sh $ACOUSTID_TEST_REDIS_HOST:6379
/wait-for-it.sh $ACOUSTID_TEST_POSTGRES_HOST:5432
/wait-for-it.sh $ACOUSTID_TEST_DB_DEFAULT_HOST:5432
/wait-for-it.sh $ACOUSTID_TEST_DB_SLOW_HOST:5432
export PIP_CACHE_DIR=`pwd`/pip-cache
......
......@@ -29,6 +29,8 @@ script_location = alembic
# are written from script.py.mako
# output_encoding = utf-8
databases = default, slow
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
......
Generic single-database configuration.
\ No newline at end of file
AcoustID's multi-database configuration.
from __future__ import with_statement
import os
import logging
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
config = context.config
fileConfig(config.config_file_name)
logger = logging.getLogger("alembic.env")
import acoustid.tables
target_metadata = acoustid.tables.metadata
......@@ -17,12 +19,25 @@ acoustid_config = acoustid.config.Config()
acoustid_config.read(acoustid_config_filename)
acoustid_config.read_env()
def include_object(obj, name, type, reflected, compare_to):
if type == "table" and obj.schema == "musicbrainz":
return False
if type == "column" and not obj.table.schema == "musicbrainz":
return False
return True
use_two_phase_commit = acoustid_config.databases.use_two_phase_commit
def include_object(db_name):
    """Return an Alembic include_object filter for the database `db_name`.

    The returned callable accepts only objects that belong to `db_name`
    (per the table's info['bind_key'], defaulting to 'default') and skips
    everything in the reflected 'musicbrainz' schema.
    """
    def check(obj, name, obj_type, reflected, compare_to):
        # Resolve the table that owns the object, if any.
        owner = None
        if obj_type == "table":
            owner = obj
        elif obj_type == "column":
            owner = obj.table
        if owner is not None:
            if owner.schema == "musicbrainz":
                return False
            if owner.info.get('bind_key', 'default') != db_name:
                return False
        return True
    return check
def run_migrations_offline():
......@@ -37,13 +52,18 @@ def run_migrations_offline():
script output.
"""
url = acoustid_config.database.create_url()
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True,
include_object=include_object)
for name, db_config in acoustid_config.databases.databases.items():
logger.info("Migrating database %s" % name)
with context.begin_transaction():
context.run_migrations()
context.configure(
url=db_config.create_url(),
target_metadata=target_metadata,
literal_binds=True,
include_object=include_object(name),
)
with context.begin_transaction():
context.run_migrations(engine_name=name)
def run_migrations_online():
......@@ -53,17 +73,45 @@ def run_migrations_online():
and associate a connection with the context.
"""
connectable = acoustid_config.database.create_engine(poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
include_object=include_object,
)
with context.begin_transaction():
context.run_migrations()
engines = {}
for name, db_config in acoustid_config.databases.databases.items():
engines[name] = rec = {}
rec["engine"] = db_config.create_engine(poolclass=pool.NullPool)
for name, rec in engines.items():
engine = rec["engine"]
rec["connection"] = conn = engine.connect()
if use_two_phase_commit:
rec["transaction"] = conn.begin_twophase()
else:
rec["transaction"] = conn.begin()
try:
for name, rec in engines.items():
logger.info("Migrating database %s" % name)
context.configure(
connection=rec["connection"],
upgrade_token="%s_upgrades" % name,
downgrade_token="%s_downgrades" % name,
target_metadata=target_metadata,
include_object=include_object(name),
)
context.run_migrations(engine_name=name)
if use_two_phase_commit:
for rec in engines.values():
rec["transaction"].prepare()
for rec in engines.values():
rec["transaction"].commit()
except:
for rec in engines.values():
rec["transaction"].rollback()
raise
finally:
for rec in engines.values():
rec["connection"].close()
if context.is_offline_mode():
......
"""${message}
<%!
import re
%>"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
......@@ -12,13 +18,20 @@ down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
% for db_name in re.split(r',\s*', config.get_main_option("databases")):
def upgrade_${db_name}():
${context.get("%s_upgrades" % db_name, "pass")}
def downgrade():
${downgrades if downgrades else "pass"}
def downgrade_${db_name}():
${context.get("%s_downgrades" % db_name, "pass")}
% endfor
......@@ -16,7 +16,15 @@ from alembic import op
import sqlalchemy as sa
def upgrade():
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_default():
op.create_foreign_key(op.f('account_google_fk_account_id'), 'account_google', 'account', ['account_id'], ['id'])
op.create_foreign_key(op.f('application_fk_account_id'), 'application', 'account', ['account_id'], ['id'])
op.create_foreign_key(op.f('stats_lookups_fk_application_id'), 'stats_lookups', 'application', ['application_id'], ['id'])
......@@ -46,7 +54,7 @@ def upgrade():
op.alter_column('recording_acoustid', 'created', nullable=False)
def downgrade():
def downgrade_default():
op.alter_column('recording_acoustid', 'created', nullable=True)
op.alter_column('track_foreignid_source', 'created', nullable=True)
op.alter_column('track_foreignid', 'created', nullable=True)
......@@ -74,3 +82,11 @@ def downgrade():
op.drop_constraint(op.f('stats_lookups_fk_application_id'), 'stats_lookups', type_='foreignkey')
op.drop_constraint(op.f('application_fk_account_id'), 'application', type_='foreignkey')
op.drop_constraint(op.f('account_google_fk_account_id'), 'account_google', type_='foreignkey')
def upgrade_slow():
pass
def downgrade_slow():
pass
"""submission result
Revision ID: 4581a68fa644
Revises: d52d50968cf7
Create Date: 2019-06-08 12:17:40.208947
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '4581a68fa644'
down_revision = 'd52d50968cf7'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the per-database upgrade function for `engine_name`."""
    handler = globals()["upgrade_%s" % engine_name]
    handler()


def downgrade(engine_name):
    """Dispatch to the per-database downgrade function for `engine_name`."""
    handler = globals()["downgrade_%s" % engine_name]
    handler()


def upgrade_default():
    # This revision does not touch the default database.
    pass


def downgrade_default():
    # Nothing to undo on the default database.
    pass


def upgrade_slow():
    """Create the submission_result table on the 'slow' database."""
    columns = [
        sa.Column('submission_id', sa.Integer(), autoincrement=False, nullable=False),
        sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('application_id', sa.Integer(), nullable=False),
        sa.Column('application_version', sa.String(), nullable=True),
        sa.Column('fingerprint_id', sa.Integer(), nullable=False),
        sa.Column('track_id', sa.Integer(), nullable=False),
        sa.Column('meta_id', sa.Integer(), nullable=True),
        sa.Column('mbid', postgresql.UUID(), nullable=True),
        sa.Column('puid', postgresql.UUID(), nullable=True),
        sa.Column('foreignid', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('submission_id', name=op.f('submission_result_pkey')),
    ]
    op.create_table('submission_result', *columns)


def downgrade_slow():
    """Drop the submission_result table from the 'slow' database."""
    op.drop_table('submission_result')
......@@ -16,7 +16,16 @@ from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_default():
op.create_table('account_google',
sa.Column('google_user_id', sa.String(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
......@@ -356,7 +365,7 @@ def upgrade():
)
def downgrade():
def downgrade_default():
op.drop_table('track_puid_source')
op.drop_table('track_meta_source')
op.drop_index(op.f('track_mbid_source_idx_track_mbid_id'), table_name='track_mbid_source')
......@@ -422,3 +431,11 @@ def downgrade():
op.drop_table('account_stats_control')
op.drop_index('account_google_idx_account_id', table_name='account_google')
op.drop_table('account_google')
def upgrade_slow():
pass
def downgrade_slow():
pass
......@@ -16,7 +16,15 @@ from alembic import op
import sqlalchemy as sa
def upgrade():
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_default():
op.drop_constraint(u'fingerprint_source_fk_submission_id', 'fingerprint_source', type_='foreignkey')
op.drop_constraint(u'track_foreignid_source_fk_submission_id', 'track_foreignid_source', type_='foreignkey')
op.drop_constraint(u'track_mbid_source_fk_submission_id', 'track_mbid_source', type_='foreignkey')
......@@ -24,9 +32,17 @@ def upgrade():
op.drop_constraint(u'track_puid_source_fk_submission_id', 'track_puid_source', type_='foreignkey')
def downgrade():
def downgrade_default():
op.create_foreign_key(u'track_puid_source_fk_submission_id', 'track_puid_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'track_meta_source_fk_submission_id', 'track_meta_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'track_mbid_source_fk_submission_id', 'track_mbid_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'track_foreignid_source_fk_submission_id', 'track_foreignid_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'fingerprint_source_fk_submission_id', 'fingerprint_source', 'submission', ['submission_id'], ['id'])
def upgrade_slow():
pass
def downgrade_slow():
pass
"""move submissions
Revision ID: d52d50968cf7
Revises: ae7e1e5763ef
Create Date: 2019-06-08 11:04:53.563059
"""
import datetime
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import Sequence, CreateSequence, DropSequence
from dateutil.relativedelta import relativedelta
# revision identifiers, used by Alembic.
revision = 'd52d50968cf7'
down_revision = 'ae7e1e5763ef'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    """Dispatch to the per-database upgrade function for `engine_name`."""
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    """Dispatch to the per-database downgrade function for `engine_name`."""
    globals()["downgrade_%s" % engine_name]()


def upgrade_default():
    # The live submission table moves to the 'slow' database; the existing
    # data is kept around under a new name instead of being dropped.
    op.rename_table('submission', 'submission_old')


def downgrade_default():
    op.rename_table('submission_old', 'submission')


def upgrade_slow():
    """Create the partitioned submission table on the 'slow' database.

    One partition per calendar month is created, covering 2010-01 through
    the end of the year after the migration runs.
    """
    # The id sequence must exist before the table references it in a
    # server_default.
    op.execute(CreateSequence(Sequence('submission_id_seq')))
    op.create_table('submission',
        sa.Column('id', sa.Integer(), server_default=sa.text("nextval('submission_id_seq')"), nullable=False),
        sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('handled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('application_id', sa.Integer(), nullable=False),
        sa.Column('application_version', sa.String(), nullable=True),
        sa.Column('fingerprint', postgresql.ARRAY(sa.Integer()), nullable=False),
        sa.Column('duration', sa.Integer(), nullable=False),
        sa.Column('bitrate', sa.Integer(), nullable=True),
        sa.Column('format', sa.String(), nullable=True),
        sa.Column('mbid', postgresql.UUID(), nullable=True),
        sa.Column('puid', postgresql.UUID(), nullable=True),
        sa.Column('foreignid', sa.String(), nullable=True),
        sa.Column('track', sa.String(), nullable=True),
        sa.Column('artist', sa.String(), nullable=True),
        sa.Column('album', sa.String(), nullable=True),
        sa.Column('album_artist', sa.String(), nullable=True),
        sa.Column('track_no', sa.Integer(), nullable=True),
        sa.Column('disc_no', sa.Integer(), nullable=True),
        sa.Column('year', sa.Integer(), nullable=True),
        postgresql_partition_by='RANGE (created)',
    )

    one_month = relativedelta(months=1)
    partitions_from = datetime.date(2010, 1, 1)
    partitions_to = datetime.date.today().replace(month=1, day=1) + relativedelta(months=12)

    range_from = partitions_from
    while range_from < partitions_to:
        range_to = range_from + one_month
        # BUGFIX: partition names previously used range_to.month, which
        # labeled each partition with the FOLLOWING month's number (January
        # data landed in submission_y2010m02). Name each partition after
        # the month it actually contains, i.e. range_from.
        op.execute("""
            CREATE TABLE IF NOT EXISTS submission_y{range_from.year:04d}m{range_from.month:02d}
            PARTITION OF submission
            FOR VALUES FROM ('{range_from}') TO ('{range_to}');
            ALTER TABLE submission_y{range_from.year:04d}m{range_from.month:02d}
            ADD PRIMARY KEY (id);
            CREATE INDEX submission_y{range_from.year:04d}m{range_from.month:02d}_idx_handled
            ON submission_y{range_from.year:04d}m{range_from.month:02d} (handled);
        """.format(range_from=range_from, range_to=range_to))
        range_from = range_to


def downgrade_slow():
    op.drop_table('submission')
    op.execute(DropSequence(Sequence('submission_id_seq')))
......@@ -16,11 +16,27 @@ from alembic import op
import sqlalchemy as sa
def upgrade():
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_default():
op.add_column('account',
sa.Column('is_admin', sa.Boolean(), server_default=sa.text('false'), nullable=False)
)
def downgrade():
def downgrade_default():
op.drop_column('account', 'is_admin')
def upgrade_slow():
pass
def downgrade_slow():
pass
......@@ -6,8 +6,14 @@ services:
ports:
- "127.0.0.1:6379:6379"
index:
image: quay.io/acoustid/acoustid-index:master
ports:
- "127.0.0.1:6080:6080"
postgres:
image: quay.io/acoustid/postgresql:master
command: ["-c", "max_connections=100", "-c", "max_prepared_transactions=110"]
ports:
- "127.0.0.1:5432:5432"
volumes:
......
This diff is collapsed.
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from nose.tools import assert_equals
from acoustid.api import serialize_response
def test_serialize_json():
    """serialize_response renders nested data as JSON with sorted keys."""
    # Plain asserts instead of nose's assert_equals: the project is moving
    # from nose to pytest (see requirements_dev), and pytest rewrites bare
    # asserts with rich failure output.
    data = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    resp = serialize_response(data, 'json')
    assert resp.content_type == 'application/json; charset=UTF-8'
    expected = b'''{"artists": [{"cities": ["Paris", "Lyon"], "name": "Jean Michel Jarre", "year": 1948}], "status": "ok"}'''
    assert resp.data == expected
def test_serialize_jsonp():
    """The 'jsonp:<callback>' format wraps the JSON body in a callback call."""
    # Plain asserts instead of nose's assert_equals (pytest migration).
    data = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    resp = serialize_response(data, 'jsonp:getData')
    assert resp.content_type == 'application/javascript; charset=UTF-8'
    expected = b'''getData({"artists": [{"cities": ["Paris", "Lyon"], "name": "Jean Michel Jarre", "year": 1948}], "status": "ok"})'''
    assert resp.data == expected
def test_serialize_xml():
    """The 'xml' format renders nested dicts/lists as an XML document."""
    # Plain asserts instead of nose's assert_equals (pytest migration).
    data = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    resp = serialize_response(data, 'xml')
    assert resp.content_type == 'text/xml; charset=UTF-8'
    # NOTE(review): `expected` is text while resp.data is bytes on Python 3;
    # this comparison only holds on Python 2 — revisit when porting.
    expected = (
        '''<?xml version='1.0' encoding='UTF-8'?>\n<response><status>ok</status><artists><artist><cities><city>Paris</city><city>Lyon'''
        '''</city></cities><name>Jean Michel Jarre</name><year>1948</year></artist></artists></response>'''
    )
    assert resp.data == expected
def test_serialize_xml_attribute():
    """Keys prefixed with '@' are rendered as XML attributes, not elements."""
    response = serialize_response({'@status': 'ok'}, 'xml')
    expected_body = ("<?xml version='1.0' encoding='UTF-8'?>\n"
                     '<response status="ok" />')
    assert_equals('text/xml; charset=UTF-8', response.content_type)
    assert_equals(expected_body, response.data)
from nose.tools import assert_equal
from acoustid.web import db
from tests import make_web_application
app = None
def setup():
    # nose module-level setup: build the web application once and enable
    # Flask's TESTING mode for every test in this module.
    global app
    app = make_web_application()
    app.config['TESTING'] = True
def test_track_page():
    """The track page renders the sample track's metadata exactly once and
    releases the request-scoped database session afterwards."""
    client = app.test_client()
    response = client.get('/track/eb31d1c3-950e-468b-9e36-e46fa75b1291')
    assert_equal(response.status_code, 200)
    assert_equal(response.data.count('Custom Track'), 1)
    assert_equal(response.data.count('Custom Artist'), 1)
    # the session registry must be empty once the request has finished
    assert not db.session.registry.has()
......@@ -4,4 +4,26 @@
#
# pip-compile --output-file requirements_dev.txt requirements_dev.in
#
nose==1.3.7
atomicwrites==1.3.0 # via pytest
attrs==19.1.0 # via pytest
backports.functools-lru-cache==1.5 # via soupsieve
beautifulsoup4==4.7.1 # via webtest
configparser==3.7.4 # via importlib-metadata
contextlib2==0.5.5 # via importlib-metadata
funcsigs==1.0.2 # via pytest
importlib-metadata==0.17 # via pluggy, pytest
more-itertools==5.0.0 # via pytest
packaging==19.0 # via pytest
pathlib2==2.3.3 # via importlib-metadata, pytest
pluggy==0.12.0 # via pytest
py==1.8.0 # via pytest
pyparsing==2.4.0 # via packaging
pytest==4.6.2
scandir==1.10.0 # via pathlib2
six==1.12.0 # via more-itertools, packaging, pathlib2, pytest, webtest
soupsieve==1.9.1 # via beautifulsoup4
waitress==1.3.0 # via webtest
wcwidth==0.1.7 # via pytest
webob==1.8.5 # via webtest
webtest==2.0.33
zipp==0.5.1 # via importlib-metadata
......@@ -160,7 +160,7 @@ def disable_track_mbid(db, track_mbid, note):
def main(script, opts, args):
db = DatabaseContext(script.engine)
db = DatabaseContext(script.db)
min_track_id = db.session.query(sql.func.min(Track.id)).scalar()
max_track_id = db.session.query(sql.func.max(Track.id)).scalar()
......
......@@ -8,7 +8,7 @@ import os
def main(script, opts, args):
    """Replace this process with psql connected to the 'default' database.

    Extra command-line arguments are passed straight through to psql.
    NOTE(review): the duplicate execlp() call that read the removed
    ``script.config.database`` attribute was stale diff residue; only the
    ``databases['default']`` form matches the current DatabasesConfig.
    """
    os.execlp('psql', 'psql', *(script.config.databases.databases['default'].create_psql_args() + args))
run_script(main)
This diff is collapsed.
import os
import pytest
from acoustid.config import str_to_bool
from acoustid.tables import metadata
from acoustid.data.fingerprint import update_fingerprint_index
from tests.data import create_sample_data
RECREATE_DB = True
def get_tables_for_bind(metadata, bind_key):
    """Return the tables in *metadata* that belong to the given bind.

    Tables opt into a non-default database via ``table.info['bind_key']``;
    tables without that key belong to the 'default' bind.  The result keeps
    the order of ``metadata.sorted_tables`` (dependency order).
    """
    return [
        table for table in metadata.sorted_tables
        if table.info.get('bind_key', 'default') == bind_key
    ]
def create_all(metadata, engines):
    """Create every table of *metadata* on the engine owning its bind,
    one transaction per engine."""
    for bind_key, engine in engines.items():
        bind_tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as connection, connection.begin():
            metadata.create_all(bind=connection, tables=bind_tables)
def drop_all(metadata, engines):
    """Drop every table of *metadata* on the engine owning its bind,
    one transaction per engine."""
    for bind_key, engine in engines.items():
        bind_tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as connection, connection.begin():
            metadata.drop_all(bind=connection, tables=bind_tables)
def delete_from_all_tables(metadata, engines):
    """Delete all rows from every table on each bind, visiting children
    before parents (reverse dependency order) to satisfy foreign keys."""
    for bind_key, engine in engines.items():
        bind_tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as connection, connection.begin():
            for table in reversed(bind_tables):
                connection.execute(table.delete())
def delete_shutdown_file(config):
    """Remove the website's shutdown marker file if it exists.

    Uses EAFP instead of the original exists()+remove() pair, which raced:
    the file could vanish between the check and the removal.  Any OSError
    other than "file not found" is re-raised.
    """
    import errno
    try:
        os.remove(config.website.shutdown_file_path)
    except OSError as exc:
        if exc.errno != errno.ENOENT:
            raise
def prepare_database(script):
    # Reset the test databases to a known state: wipe all rows, then
    # re-create the sample fixture data inside a fresh app context.
    delete_from_all_tables(metadata, script.db_engines)
    with script.context() as ctx:
        create_sample_data(ctx)
        # flush first so the fingerprint rows exist for the index update
        ctx.db.flush()
        update_fingerprint_index(ctx.db.connection(), ctx.index)
        ctx.db.commit()
@pytest.fixture(scope='session')
def config_path():
    """Path of the shared acoustid-test.conf next to the tests package."""
    tests_dir = os.path.dirname(os.path.abspath(__file__))
    return tests_dir + '/../acoustid-test.conf'
@pytest.fixture(scope='session')
def script_global(config_path):
    # Session-wide Script instance built from the test configuration.
    from acoustid.script import Script
    script = Script(config_path, tests=True)
    # Set ACOUSTID_TEST_RECREATE to a truthy value to drop and re-create
    # the whole schema instead of only truncating tables.
    recreate_db = str_to_bool(os.environ.get('ACOUSTID_TEST_RECREATE', ''))
    if recreate_db:
        drop_all(metadata, script.db_engines)
        create_all(metadata, script.db_engines)
    prepare_database(script)
    yield script
@pytest.fixture(scope='session')
def server_global(script_global, config_path):
    # Session-wide API (WSGI) application; depends on script_global so the
    # database is prepared before the server is built.
    from acoustid.server import make_application
    server = make_application(config_path, tests=True)
    delete_shutdown_file(server.config)
    yield server
@pytest.fixture(scope='session')
def web_app_global(script_global, config_path):
    # Session-wide website (Flask) application; note it exposes its config
    # as acoustid_config, unlike the API server's config attribute.
    from acoustid.web.app import make_application
    server = make_application(config_path, tests=True)
    delete_shutdown_file(server.acoustid_config)
    yield server
@pytest.fixture()
def server(server_global):
    # Per-test handle on the shared session-scoped API application.
    yield server_global
@pytest.fixture()
def web_app(web_app_global):
    # Per-test handle on the shared session-scoped website application.
    yield web_app_global
@pytest.fixture()
def cleanup(script_global):
    # Opt-in fixture for tests that modify data: restores the sample
    # database state after the test finishes.
    yield
    prepare_database(script_global)
from acoustid.models import Account, Application, Fingerprint, Meta, Track, TrackMeta
TEST_2_LENGTH = 320
TEST_2_FP = 'AQABVtuUZFGShAqO-h9OHD96SvhwBVNCKQnOIYmiIc-ENwF7TDe8Hr0W_AjhvRCP2sfT4DTS7zjyOYeqaI-RSxee5RmaWzhOHnlcaB6HnPgpdE-DkWIH2ysYG_Eh9zJCyfCXGOdw-EGoD2p69IavWOhzMD-a9tBx9FgPVz2qNDvQH3744ISIXRKeHto5_MhyeMtxc-COnYJ_lHLwRAgPvShz_Hga4zd8HD9UKXWOPP3xRLmGnlbQHKfxGPeRvAt6UngMvcF-gkpRi0bUZjGaH6FUHb_xGDt6aHmM__ghfkmH70B4fWiuCj8y8uj3oImZY8d3NFWWHuGF-3hCPEd_uEOyE_nw4w8ueXi24znCHOHxSWtw9BnSBzrSHF2Y4S0e_EioZoh9XMGfo2dqNMeP80aQPM5xGT9efMeTYL-KIqmHdDraHs-P8IcYjoj0I7_Q43iJ9BF64nSKKth2SjG-cvCHH-2OL8txHsUt9HhF4LiK5j16lAf1FkjvQiN55FSOkkOPkmj4GK-OH80eIeyh98HhE_qhPwjzKAV-HJ2OZkd4Q_vhp0d_6Id-_IeWW9CKoP3RKM-Bo3mOfvhxND_6HMgZ6EfXHB-8Q8-iok1znOi-ozmx54P2Dg5V_PCgLxy8KiH6C0cbHU3Ebtiho9Rxw8er47tw7jgRNxl84ziPJ-B1_DiNNClzaGSCvMGPGxePMD5qZYEuAwdTXYSYcIkmodc2nMqg_WgqBk_yBdVx0vCjQD8uhNRxXTgvVFSOSOmx61C1KMaNsFwM93h-PBdmFm8o45nxDabx48cTbGl4hHuhasjSwPtxPvAV1A7yQMukREERR-nxL8j-EbWYQ8sj4joABQQmjQhkjLFCKSAo4QoxYiQwQhgmkGjCKGAIMMA4BIwQwjhAFMBUCCUAYEIxpYxUCDlEjJYOScSMgsIIAgADwjKEFBAUCkMEMYAagoARzAAHCDCIISKANkgYYBiQwgDDjHEMIGWZFUBQLhgohBGkhECOMEAMIYghogTgQghgiSLCYUegAsJApIQjxABNDFWCa6AIAQ4Q4KgAgIABgGDCMNGIMgQJRQAQTACpgBNIJkUcBMkpoKAgXCjAgAAGKIcYIVAYbZgwggkEmKLEiYGYAYQQShFAAAQBFEEAEuEIgwYRQoARnBkAmAGMEAGFGIgQBigCwAkABEIA'
TEST_2_FP_RAW = [-772063964, -772066012, -1007079116, -1011011276, -1027788492, -1029889740, -1031261916, -1031269084, -1031270620, -1031021744, -1031079600, -1031078425, -1031032346, -1018580506, -1018646290, -1022775046, -1056337446, -1056261686, -1073039030, -1073039013, -1068976005, -1001867175, -733365175, -733302711, -737435575, -603332504, -603365272, -737516311, -188076117, -175490390, -745993558, -758642022, -767030630, -1034347878, -1038412133, -1039584631, -1039654264, -1034345784, -1030086056, -1011141092, -1045873092, -1045939676, -1045947803, -2132276505, -2132259914, -2140415082, -2140472938, -2140481130, -2140546634, -2140603929, -2144802523, -2144536267, -2123540203, -2115147515, -2081855203, -2098598611, -2098606801, -2098606737, -2106995329, -2090217989, -2123638309, -2140477173, -2140477173, -2123634405, -2106992325, -2107061957, -2107061991, -2107007715, -1033140963, -1023769329, -1025864433, -1026913002, -1010133962, -1017409482, -1017540482, -1022848902, -1056337830, -1056271030, -1056261302, -1073038501, -1068975271, -1060587447, -993477559, -1001672631, -737435575, -737566615, -737550231, -737419031, 1422585259, 1955228586, 1367940010, 1388841914, 1380453258, 1376328586, 1376458634, 1107956618, 1107899017, 1113072281, 1121657497, 1119494811, 1135224479, 1135226615, 1139489511, 1130891874, 1126713938, 1109977859, 1114237187, 1122691331, 1122695431, 1122687255, 1114233125, 1130944869, 1126746469, 1097554247, 1105885511, 1105885511, 1135270230, 1122523494, 1114135910, 1109939695, 1093236223, 1076520335, 1080714635, 1089107851, 11092923, 11010986, 15209450, 15492074, 7103274, 2913082, 2905882, 2940681, 2947848, 7138056, 32303368, 61716744, 44932552, 1118668232, 1118406137, 1122600424, 1110167912, 1110167848, 1110106424, 1122689305, 1118495003, 1118478714, 1118540010, 1122599146, 1110016234, 1110147562, 1110094153, 1076535560, 1076538376, -1058363384, -794183656, -794249176, -790063064, -261519320, -261519319, -529562582, -529628886, -530153430, 
-530280406, -534465494, -534459350, -517027794, -517027778, -517056387, 1630428508, 1634606924, 1643060940, -508616995, -508740929, -475252054, -487834709, -496223301, -496231493, -496092485, -488752486, -489735542, -494125366, -494125542, 1641889598, 1627335998, 1617898782, 1613703454, 1614756622, 537664270, 541854222, 541854238, 541874782, 558651982, 558168910, 558168910, 558168398, 566360398, 1636039038, 1669593454, 1938028670, 1942087766, 1942087766, 1665394807, 1631779173, 1640192868, 1640221300, 1640483428, 1640487796, 1631902020, 1627682884, 553932868, 554654068, 554589029, 567179879, 562985575, 562846279, 562879301, 558684487, 554678646, 554678646, 558873462, 567262070, 563067366, 562936022, 567064775, 558692565, 1628436725, 1661925605, 1661893095, 1666087909, 592329573, 567032663, 567032133, 567032132, 1640840020, 1909340900, 1909340900, -238142748, -775079212, -775152956, -1043580220, -1047774524, -2121450764, -2138162460, -2138162460, -2138232091, -2121520409, -2117330313, -2124670345, -2124604585, -2092205227, -2083848891, -2083787451, -2117489195, -2117550619, -2124902943, -2139517453, -2139383405, -2122597997, -2122598221, -2093090639, -2095162991, -2107737727, -2107754111, -2108805231, -2099495199, -2099499291, -2097402137, -2098451465, -2098709513, -2098717737, -2081859113, -2123773481, -2140434985, -2140496425, -2140428906, -2132171338, -2132236874, -2065124170, -2023181130, -2039964482, -2044162882, -2044098306, -2111137761, -2111117043, -2111165684, -2144720372, -2140460532, -2132132340, -2136328643, -2136407507, -2136471761, -2136471761, -2132277457, -2114187977, -2091268651, -2083809851, -2083872379, -2117361257, -2117552729, -2141681241, -2139584009, -2143708937, -2143618857, -2126874411, -2126894891, -2093339196, -2105923644, -2099628348, -2103836012, -2103966047, -2099751259, -2097639449, -2031579161, -2039763466, -2031375914, -2014728234, -2056675370, -2056548654, -2073127278, -2077251950, -2077233262, -2077266510, -2077332302, -2073154382, 
-2014434126, -2031143722, -2039794617, -2039792379, -2039792868, -2107033028, -2081936836, -2136462820, -2136265204, -2136263155, -2136456385, -2136456401, -2132228626, -2122791506, -2091070041, -2107647561, -2108769915, -2100384379]
def create_sample_data(ctx):
    """Insert the minimal fixture rows the test suite relies on: one
    account, one application, and one track with metadata and a
    fingerprint."""
    account = Account(name='user1', apikey='user1_api_key')
    application = Application(name='app1', apikey='app1_api_key', version='1.0', account=account)
    track = Track(gid='eb31d1c3-950e-468b-9e36-e46fa75b1291')
    meta = Meta(track='Custom Track', artist='Custom Artist')
    track_meta = TrackMeta(track=track, meta=meta, submission_count=1)
    fingerprint = Fingerprint(id=1, track=track, fingerprint=TEST_2_FP_RAW, length=TEST_2_LENGTH, submission_count=1)
    for row in (account, application, track, meta, track_meta, fingerprint):
        ctx.db.add(row)
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import pytest
from webtest import TestApp, AppError
from nose.tools import assert_equals
from acoustid.api import serialize_response
def test_serialize_json():
data = {