@@ -20,12 +20,19 @@ test:
     - redis
     - name: quay.io/acoustid/postgresql:master
       alias: postgres
+    - name: quay.io/acoustid/acoustid-index:master
+      alias: acoustid-index
   variables:
     POSTGRES_USER: acoustid
     POSTGRES_PASSWORD: acoustid
     POSTGRES_DB: acoustid_test
-    ACOUSTID_TEST_POSTGRES_HOST: postgres
+    ACOUSTID_TEST_DB_TWO_PHASE_COMMIT: 0
+    ACOUSTID_TEST_DB_DEFAULT_HOST: postgres
+    ACOUSTID_TEST_DB_DEFAULT_NAME: acoustid_test
+    ACOUSTID_TEST_DB_SLOW_HOST: postgres
+    ACOUSTID_TEST_DB_SLOW_NAME: acoustid_test
     ACOUSTID_TEST_REDIS_HOST: redis
+    ACOUSTID_TEST_INDEX_HOST: acoustid-index
   cache:
     key: $CI_JOB_NAME-$CI_COMMIT_REF_SLUG
     paths:
...
 [database]
+two_phase_commit=yes
+[database:default]
 host=127.0.0.1
 port=5432
 user=acoustid
 name=acoustid_test
 password=acoustid
+[database:slow]
+host=127.0.0.1
+port=5432
+user=acoustid
+name=acoustid_slow_test
+password=acoustid
 [logging]
 level=WARNING
 level.sqlalchemy=WARNING
 syslog=yes
 syslog_facility=local1
+[index]
+host=127.0.0.1
+port=6080
 [redis]
 host=127.0.0.1
 port=6379
...
 [database]
+name=acoustid
 user=acoustid
 password=XXX
-superuser=postgres
+host=localhost
+port=5432
+[database_slow]
 name=acoustid
+user=acoustid
+password=XXX
 host=localhost
 port=5432
...
@@ -18,8 +18,10 @@ def read_env_item(obj, key, name, convert=None):
     value = None
     if name in os.environ:
         value = os.environ[name]
-    if name + '_FILE' in os.environ:
+        logger.info('Reading config value from environment variable %s', name)
+    elif name + '_FILE' in os.environ:
         value = open(os.environ[name + '_FILE']).read().strip()
+        logger.info('Reading config value from environment variable %s', name + '_FILE')
     if value is not None:
         if convert is not None:
             value = convert(value)
@@ -39,6 +41,35 @@ class BaseConfig(object):
         pass
+class DatabasesConfig(BaseConfig):
+    def __init__(self):
+        self.databases = {
+            'default': DatabaseConfig(),
+            'slow': DatabaseConfig(),
+        }
+        self.use_two_phase_commit = False
+    def create_engines(self, **kwargs):
+        engines = {}
+        for name, db_config in self.databases.items():
+            engines[name] = db_config.create_engine(**kwargs)
+        return engines
+    def read_section(self, parser, section):
+        if parser.has_option(section, 'two_phase_commit'):
+            self.use_two_phase_commit = parser.getboolean(section, 'two_phase_commit')
+        for name, sub_config in self.databases.items():
+            sub_section = '{}:{}'.format(section, name)
+            sub_config.read_section(parser, sub_section)
+    def read_env(self, prefix):
+        read_env_item(self, 'use_two_phase_commit', prefix + 'TWO_PHASE_COMMIT', convert=str_to_bool)
+        for name, sub_config in self.databases.items():
+            sub_prefix = prefix + name.upper() + '_'
+            sub_config.read_env(sub_prefix)
 class DatabaseConfig(BaseConfig):
     def __init__(self):
@@ -103,21 +134,21 @@ class DatabaseConfig(BaseConfig):
         if parser.has_option(section, 'password'):
             self.password = parser.get(section, 'password')
         if parser.has_option(section, 'pool_size'):
-            self.password = parser.getint(section, 'pool_size')
+            self.pool_size = parser.getint(section, 'pool_size')
         if parser.has_option(section, 'pool_recycle'):
-            self.password = parser.getint(section, 'pool_recycle')
+            self.pool_recycle = parser.getint(section, 'pool_recycle')
         if parser.has_option(section, 'pool_pre_ping'):
-            self.password = parser.getboolean(section, 'pool_pre_ping')
+            self.pool_pre_ping = parser.getboolean(section, 'pool_pre_ping')
     def read_env(self, prefix):
-        read_env_item(self, 'name', prefix + 'POSTGRES_DB')
-        read_env_item(self, 'host', prefix + 'POSTGRES_HOST')
-        read_env_item(self, 'port', prefix + 'POSTGRES_PORT', convert=int)
-        read_env_item(self, 'user', prefix + 'POSTGRES_USER')
-        read_env_item(self, 'password', prefix + 'POSTGRES_PASSWORD')
-        read_env_item(self, 'pool_size', prefix + 'POSTGRES_POOL_SIZE', convert=int)
-        read_env_item(self, 'pool_recycle', prefix + 'POSTGRES_POOL_RECYCLE', convert=int)
-        read_env_item(self, 'pool_pre_ping', prefix + 'POSTGRES_POOL_PRE_PING', convert=str_to_bool)
+        read_env_item(self, 'name', prefix + 'NAME')
+        read_env_item(self, 'host', prefix + 'HOST')
+        read_env_item(self, 'port', prefix + 'PORT', convert=int)
+        read_env_item(self, 'user', prefix + 'USER')
+        read_env_item(self, 'password', prefix + 'PASSWORD')
+        read_env_item(self, 'pool_size', prefix + 'POOL_SIZE', convert=int)
+        read_env_item(self, 'pool_recycle', prefix + 'POOL_RECYCLE', convert=int)
+        read_env_item(self, 'pool_pre_ping', prefix + 'POOL_PRE_PING', convert=str_to_bool)
 class IndexConfig(BaseConfig):
@@ -342,7 +373,7 @@ class RateLimiterConfig(BaseConfig):
 class Config(object):
     def __init__(self):
-        self.database = DatabaseConfig()
+        self.databases = DatabasesConfig()
         self.logging = LoggingConfig()
         self.website = WebSiteConfig()
         self.index = IndexConfig()
@@ -357,7 +388,7 @@ class Config(object):
         logger.info("Loading configuration file %s", path)
         parser = ConfigParser.RawConfigParser()
         parser.read(path)
-        self.database.read(parser, 'database')
+        self.databases.read(parser, 'database')
         self.logging.read(parser, 'logging')
         self.website.read(parser, 'website')
         self.index.read(parser, 'index')
@@ -373,7 +404,7 @@ class Config(object):
             prefix = 'ACOUSTID_TEST_'
         else:
             prefix = 'ACOUSTID_'
-        self.database.read_env(prefix)
+        self.databases.read_env(prefix + 'DB_')
         self.logging.read_env(prefix)
         self.website.read_env(prefix)
         self.index.read_env(prefix)
...
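A quick illustration of the renamed variables: with the new DB_ prefix, one flat environment namespace now feeds the default/slow pair plus the shared two-phase-commit flag. A minimal sketch, assuming str_to_bool treats '1' as true (hostnames here are hypothetical, not from this MR):

import os
from acoustid.config import DatabasesConfig

os.environ['ACOUSTID_DB_TWO_PHASE_COMMIT'] = '1'
os.environ['ACOUSTID_DB_DEFAULT_HOST'] = 'pg-main'  # hypothetical host
os.environ['ACOUSTID_DB_SLOW_HOST'] = 'pg-slow'     # hypothetical host

databases = DatabasesConfig()
databases.read_env('ACOUSTID_DB_')  # same prefix Config.read_env passes in non-test mode
assert databases.use_two_phase_commit is True
assert databases.databases['default'].host == 'pg-main'
assert databases.databases['slow'].host == 'pg-slow'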
@@ -21,3 +21,6 @@ FINGERPRINT_MAX_LENGTH_DIFF = 7
 FINGERPRINT_MAX_ALLOWED_LENGTH_DIFF = 30
 MAX_REQUESTS_PER_SECOND = 3
+MAX_FOREIGNID_NAMESPACE_LENGTH = 10
+MAX_FOREIGNID_VALUE_LENGTH = 64
@@ -28,6 +28,7 @@ def insert_submission(conn, data):
         'source_id': data.get('source_id'),
         'format_id': data.get('format_id'),
         'meta_id': data.get('meta_id'),
+        'foreignid': data.get('foreignid'),
         'foreignid_id': data.get('foreignid_id'),
     })
     id = conn.execute(insert_stmt).inserted_primary_key[0]
...
 from sqlalchemy.orm import sessionmaker
+from acoustid.tables import metadata
 Session = sessionmaker()
+def get_bind_args(engines):
+    binds = {}
+    for table in metadata.sorted_tables:
+        bind_key = table.info.get('bind_key', 'default')
+        if bind_key != 'default':
+            binds[table] = engines[bind_key]
+    return {'bind': engines['default'], 'binds': binds}
+def get_session_args(script):
+    kwargs = {'twophase': script.config.databases.use_two_phase_commit}
+    kwargs.update(get_bind_args(script.db_engines))
+    return kwargs
 class DatabaseContext(object):
-    def __init__(self, bind):
-        self.session = Session(bind=bind)
+    def __init__(self, script):
+        self.session = Session(**get_session_args(script))
     def __enter__(self):
         return self
...
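To make the routing above concrete: get_bind_args sends every table whose Table.info carries bind_key='slow' to the slow engine and leaves everything else on the default one. A minimal sketch with hypothetical engine URLs (any two engines work):

import sqlalchemy
from acoustid.db import Session, get_bind_args

engines = {
    'default': sqlalchemy.create_engine('postgresql:///acoustid'),    # hypothetical URL
    'slow': sqlalchemy.create_engine('postgresql:///acoustid_slow'),  # hypothetical URL
}
session_args = get_bind_args(engines)
# session_args['bind'] is the default engine; session_args['binds'] maps the
# submission and submission_result tables (bind_key 'slow') to the slow engine.
session = Session(**session_args)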
@@ -79,3 +79,11 @@ class StatsLookups(Base):
     __table__ = tables.stats_lookups
     application = relationship('Application')
+class Submission(Base):
+    __table__ = tables.submission
+class SubmissionResult(Base):
+    __table__ = tables.submission_result
@@ -11,11 +11,26 @@ from optparse import OptionParser
 from acoustid.config import Config
 from acoustid.indexclient import IndexClientPool
 from acoustid.utils import LocalSysLogHandler
+from acoustid.db import DatabaseContext
 from acoustid._release import GIT_RELEASE
 logger = logging.getLogger(__name__)
+class ScriptContext(object):
+    def __init__(self, db, redis, index):
+        self.db = db
+        self.redis = redis
+        self.index = index
+    def __enter__(self):
+        return self
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.db.close()
 class Script(object):
     def __init__(self, config_path, tests=False):
@@ -23,25 +38,30 @@ class Script(object):
         if config_path:
             self.config.read(config_path)
         self.config.read_env(tests=tests)
-        if tests:
-            self.engine = sqlalchemy.create_engine(self.config.database.create_url(),
-                                                   poolclass=sqlalchemy.pool.AssertionPool)
-        else:
-            self.engine = sqlalchemy.create_engine(self.config.database.create_url())
+        create_engine_kwargs = {'poolclass': sqlalchemy.pool.AssertionPool} if tests else {}
+        self.db_engines = self.config.databases.create_engines(**create_engine_kwargs)
         if not self.config.index.host:
             self.index = None
         else:
             self.index = IndexClientPool(host=self.config.index.host,
                                          port=self.config.index.port,
                                          recycle=60)
         if not self.config.redis.host:
             self.redis = None
         else:
             self.redis = Redis(host=self.config.redis.host,
                                port=self.config.redis.port)
         self._console_logging_configured = False
         self.setup_logging()
+    @property
+    def engine(self):
+        return self.db_engines['default']
     def setup_logging(self):
         for logger_name, level in sorted(self.config.logging.levels.items()):
             logging.getLogger(logger_name).setLevel(level)
@@ -66,6 +86,10 @@ class Script(object):
     def setup_sentry(self):
         sentry_sdk.init(self.config.sentry.script_dsn, release=GIT_RELEASE)
+    def context(self):
+        db = DatabaseContext(self).session
+        return ScriptContext(db=db, redis=self.redis, index=self.index)
 def run_script(func, option_cb=None, master_only=False):
     parser = OptionParser()
...
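Scripts can now open everything in one step: Script.context() hands back a session bound to both databases together with the shared redis and index handles, and closes the session on exit. A usage sketch (config path is hypothetical):

from acoustid.script import Script
from acoustid.models import Submission

script = Script('/etc/acoustid/acoustid.conf')  # hypothetical path
with script.context() as ctx:
    # ctx.db is an ORM session spanning the default and slow databases;
    # the Submission model is routed to the slow engine via its bind_key.
    pending = ctx.db.query(Submission).filter_by(handled=False).count()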
@@ -9,7 +9,7 @@ from cStringIO import StringIO
 from werkzeug.exceptions import HTTPException
 from werkzeug.routing import Map, Rule, Submount
 from werkzeug.wrappers import Request
-from werkzeug.contrib.fixers import ProxyFix
+from werkzeug.middleware.proxy_fix import ProxyFix
 from acoustid.script import Script
 from acoustid._release import GIT_RELEASE
 import acoustid.api.v1
@@ -53,12 +53,15 @@ admin_url_rules = [
 class Server(Script):
-    def __init__(self, config_path):
-        super(Server, self).__init__(config_path)
+    def __init__(self, config_path, **kwargs):
+        super(Server, self).__init__(config_path, **kwargs)
         url_rules = api_url_rules + admin_url_rules
         self.url_map = Map(url_rules, strict_slashes=False)
     def __call__(self, environ, start_response):
+        return self.wsgi_app(environ, start_response)
+    def wsgi_app(self, environ, start_response):
         urls = self.url_map.bind_to_environ(environ)
         handler = None
         try:
@@ -112,16 +115,16 @@ def add_cors_headers(app):
     return wrapped_app
-def make_application(config_path):
+def make_application(config_path, **kwargs):
     """Construct a WSGI application for the AcoustID server
     :param config_path: path to the server configuration file
     """
-    server = Server(config_path)
+    server = Server(config_path, **kwargs)
     server.setup_sentry()
-    app = GzipRequestMiddleware(server)
-    app = ProxyFix(app)
-    app = SentryWsgiMiddleware(app)
-    app = replace_double_slashes(app)
-    app = add_cors_headers(app)
-    return server, app
+    server.wsgi_app = GzipRequestMiddleware(server.wsgi_app)
+    server.wsgi_app = ProxyFix(server.wsgi_app)
+    server.wsgi_app = SentryWsgiMiddleware(server.wsgi_app)
+    server.wsgi_app = replace_double_slashes(server.wsgi_app)
+    server.wsgi_app = add_cors_headers(server.wsgi_app)
+    return server
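The rewrapping pattern above keeps the Server instance itself as the WSGI entry point (its __call__ delegates to wsgi_app), so the middleware stack wraps only the callable while attributes such as the config and engines stay reachable. A usage sketch (hypothetical path and port):

from wsgiref.simple_server import make_server
from acoustid.server import make_application

server = make_application('/etc/acoustid/acoustid.conf')  # hypothetical path
# server.config, server.db_engines etc. are still accessible here, and
# calling the server runs the fully wrapped middleware stack:
make_server('127.0.0.1', 8080, server).serve_forever()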
+import sqlalchemy
 import sqlalchemy.event
 from sqlalchemy import (
-    MetaData, Table, Column, Index,
+    MetaData, Table, Column, Index, Sequence,
     ForeignKey, CheckConstraint,
     Integer, String, DateTime, Boolean, Date, Text, SmallInteger, BigInteger, CHAR,
     DDL, sql,
@@ -103,7 +104,7 @@ source = Table('source', metadata,
     Index('source_idx_uniq', 'application_id', 'account_id', 'version', unique=True),
 )
-submission = Table('submission', metadata,
+submission_old = Table('submission_old', metadata,
     Column('id', Integer, primary_key=True),
     Column('fingerprint', ARRAY(Integer), nullable=False),
     Column('length', SmallInteger, CheckConstraint('length>0'), nullable=False),
@@ -118,7 +119,55 @@ submission = Table('submission', metadata,
     Column('foreignid_id', Integer, ForeignKey('foreignid.id')),
 )
-Index('submission_idx_handled', submission.c.id, postgresql_where=submission.c.handled == False)  # noqa: E712
+Index('submission_idx_handled', submission_old.c.id, postgresql_where=submission_old.c.handled == False)  # noqa: E712
+submission_id_seq = Sequence('submission_id_seq', metadata=metadata)
+submission = Table('submission', metadata,
+    Column('id', Integer, submission_id_seq, server_default=submission_id_seq.next_value(), primary_key=True),
+    Column('created', DateTime(timezone=True), server_default=sql.func.current_timestamp(), nullable=False),
+    Column('handled', Boolean, default=False, server_default=sql.false()),
+    Column('account_id', Integer, nullable=False),  # ForeignKey('account.id')
+    Column('application_id', Integer, nullable=False),  # ForeignKey('application.id')
+    Column('application_version', String),
+    Column('fingerprint', ARRAY(Integer), nullable=False),
+    Column('duration', Integer, CheckConstraint('duration>0'), nullable=False),
+    Column('bitrate', Integer, CheckConstraint('bitrate>0')),
+    Column('format', String),
+    Column('mbid', UUID),
+    Column('puid', UUID),
+    Column('foreignid', String),
+    Column('track', String),
+    Column('artist', String),
+    Column('album', String),
+    Column('album_artist', String),
+    Column('track_no', Integer),
+    Column('disc_no', Integer),
+    Column('year', Integer),
+    info={'bind_key': 'slow'},
+)
+submission_result = Table('submission_result', metadata,
+    Column('submission_id', Integer, primary_key=True, autoincrement=False),
+    Column('created', DateTime(timezone=True), server_default=sql.func.current_timestamp(), nullable=False),
+    Column('account_id', Integer, nullable=False),  # ForeignKey('account.id')
+    Column('application_id', Integer, nullable=False),  # ForeignKey('application.id')
+    Column('application_version', String),
+    Column('fingerprint_id', Integer, nullable=False),  # ForeignKey('fingerprint.id')
+    Column('track_id', Integer, nullable=False),  # ForeignKey('track.id')
+    Column('meta_id', Integer),  # ForeignKey('meta.id')
+    Column('mbid', UUID),
+    Column('puid', UUID),
+    Column('foreignid', String),
+    info={'bind_key': 'slow'},
+)
 stats = Table('stats', metadata,
     Column('id', Integer, primary_key=True),
@@ -193,6 +242,8 @@ fingerprint = Table('fingerprint', metadata,
     Index('fingerprint_idx_track_id', 'track_id'),
 )
+fingerprint.add_is_dependent_on(track)
 fingerprint_source = Table('fingerprint_source', metadata,
     Column('id', Integer, primary_key=True),
     Column('fingerprint_id', Integer, ForeignKey('fingerprint.id'), nullable=False),
...
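Since the new submission tables are the only ones marked with bind_key='slow', it is easy to double-check which tables the session will route to the slow database. A small sketch:

from acoustid.tables import metadata

slow_tables = [table.name for table in metadata.sorted_tables
               if table.info.get('bind_key', 'default') == 'slow']
print(slow_tables)  # expected: ['submission', 'submission_result']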
@@ -9,6 +9,7 @@ import datetime
 import hmac
 import base64
 import six
+from acoustid.const import MAX_FOREIGNID_NAMESPACE_LENGTH, MAX_FOREIGNID_VALUE_LENGTH
 from six.moves.urllib.request import urlopen
 from six.moves.urllib.parse import urlencode
 from logging import Handler
@@ -54,7 +55,15 @@ def is_int(s):
 def is_foreignid(s):
-    return bool(re.match(r'^[0-9a-z]+:.+$', s))
+    match = re.match(r'^([0-9a-z]+):(.+)$', s)
+    if match is None:
+        return False
+    namespace, value = match.groups()
+    if len(namespace) > MAX_FOREIGNID_NAMESPACE_LENGTH:
+        return False
+    if len(value) > MAX_FOREIGNID_VALUE_LENGTH:
+        return False
+    return True
 def singular(plural):
...
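The tightened validator above now enforces the two length limits from acoustid/const.py as well as the namespace:value shape. A few illustrative cases (identifier values are made up):

from acoustid.utils import is_foreignid

assert is_foreignid('vgmdb:1234')             # well-formed namespace:value
assert not is_foreignid('no-colon-here')      # missing separator
assert not is_foreignid('x' * 11 + ':1')      # namespace longer than 10 chars
assert not is_foreignid('vgmdb:' + 'v' * 65)  # value longer than 64 chars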
@@ -5,9 +5,10 @@ import sentry_sdk
 from sentry_sdk.integrations.flask import FlaskIntegration
 from flask import Flask, request, session
 from flask.sessions import SecureCookieSessionInterface
-from werkzeug.contrib.fixers import ProxyFix
+from werkzeug.middleware.proxy_fix import ProxyFix
 from sqlalchemy.orm import scoped_session
 from acoustid.script import Script
+from acoustid.db import get_session_args
 from acoustid.web import db
 from acoustid.web.views.general import general_page
 from acoustid.web.views.user import user_page
@@ -36,6 +37,8 @@ def make_application(config_filename=None, tests=False):
         GOOGLE_OAUTH_CLIENT_ID=config.website.google_oauth_client_id,
         GOOGLE_OAUTH_CLIENT_SECRET=config.website.google_oauth_client_secret,
     )
+    if tests:
+        app.config['TESTING'] = True
     app.acoustid_script = script
     app.acoustid_config = config
@@ -88,7 +91,7 @@ def make_application(config_filename=None, tests=False):
         from acoustid.api import get_health_response
         return get_health_response(script, request)
-    db.session_factory.configure(bind=config.database.create_engine())
+    db.session_factory.configure(**get_session_args(script))
     db.session = scoped_session(db.session_factory, scopefunc=get_flask_request_scope)
     app.register_blueprint(general_page)
...
@@ -33,7 +33,7 @@ def render_page(name, **context):
     text = file.read().decode('utf8')
     text = render_template_string(text, **context)
     md = Markdown(extensions=['meta'])
-    md.treeprocessors["flask_links"] = MarkdownFlaskUrlProcessor(md)
+    md.treeprocessors.register(MarkdownFlaskUrlProcessor(md), 'flask_links', 50)
     html = md.convert(text)
     title = ' '.join(md.Meta.get('title', []))
     return render_template('page.html', content=html, title=title)
...
@@ -17,7 +17,7 @@ fi
 echo "GIT_RELEASE = '$GIT_RELEASE'" > acoustid/_release.py
-docker pull $IMAGE:$PREV_VERSION
+docker pull $IMAGE:$PREV_VERSION || true
 docker build --cache-from=$IMAGE:$PREV_VERSION -t $IMAGE:$VERSION .
 docker push $IMAGE:$VERSION
...
 CREATE DATABASE "acoustid";
 CREATE DATABASE "acoustid_test";
+CREATE DATABASE "acoustid_slow";
+CREATE DATABASE "acoustid_slow_test";
 \c acoustid
 create extension intarray;
 create extension pgcrypto;
@@ -12,3 +15,15 @@ create extension intarray;
 create extension pgcrypto;
 create extension acoustid;
 create extension cube;
+\c acoustid_slow
+create extension intarray;
+create extension pgcrypto;
+create extension acoustid;
+create extension cube;
+\c acoustid_slow_test
+create extension intarray;
+create extension pgcrypto;
+create extension acoustid;
+create extension cube;
@@ -2,8 +2,10 @@
 set -ex
+/wait-for-it.sh $ACOUSTID_TEST_INDEX_HOST:6080
 /wait-for-it.sh $ACOUSTID_TEST_REDIS_HOST:6379
-/wait-for-it.sh $ACOUSTID_TEST_POSTGRES_HOST:5432
+/wait-for-it.sh $ACOUSTID_TEST_DB_DEFAULT_HOST:5432
+/wait-for-it.sh $ACOUSTID_TEST_DB_SLOW_HOST:5432
 export PIP_CACHE_DIR=`pwd`/pip-cache
...
@@ -29,6 +29,8 @@ script_location = alembic
 # are written from script.py.mako
 # output_encoding = utf-8
+databases = default, slow
 # Logging configuration
 [loggers]
 keys = root,sqlalchemy,alembic
...
-Generic single-database configuration.
+AcoustID's multi-database configuration.
\ No newline at end of file
 from __future__ import with_statement
 import os
+import logging
 from alembic import context
 from sqlalchemy import engine_from_config, pool
 from logging.config import fileConfig
 config = context.config
 fileConfig(config.config_file_name)
+logger = logging.getLogger("alembic.env")
 import acoustid.tables
 target_metadata = acoustid.tables.metadata
@@ -17,12 +19,25 @@ acoustid_config = acoustid.config.Config()
 acoustid_config.read(acoustid_config_filename)
 acoustid_config.read_env()
-def include_object(obj, name, type, reflected, compare_to):
-    if type == "table" and obj.schema == "musicbrainz":
-        return False
-    if type == "column" and not obj.table.schema == "musicbrainz":
-        return False
-    return True
+use_two_phase_commit = acoustid_config.databases.use_two_phase_commit
+def include_object(db_name):
+    def inner(obj, name, obj_type, reflected, compare_to):
+        if obj_type == "table":
+            if obj.schema == "musicbrainz":
+                return False
+            bind_key = obj.info.get('bind_key', 'default')
+            if bind_key != db_name:
+                return False
+        if obj_type == "column":
+            if obj.table.schema == "musicbrainz":
+                return False
+            bind_key = obj.table.info.get('bind_key', 'default')
+            if bind_key != db_name:
+                return False
+        return True
+    return inner
 def run_migrations_offline():
@@ -37,13 +52,18 @@ def run_migrations_offline():
     script output.
     """
-    url = acoustid_config.database.create_url()
-    context.configure(
-        url=url, target_metadata=target_metadata, literal_binds=True,
-        include_object=include_object)
-    with context.begin_transaction():
-        context.run_migrations()
+    for name, db_config in acoustid_config.databases.databases.items():
+        logger.info("Migrating database %s" % name)
+        context.configure(
+            url=db_config.create_url(),
+            target_metadata=target_metadata,
+            literal_binds=True,
+            include_object=include_object(name),
+        )
+        with context.begin_transaction():
+            context.run_migrations(engine_name=name)
 def run_migrations_online():
@@ -53,17 +73,45 @@ def run_migrations_online():
     and associate a connection with the context.
     """
-    connectable = acoustid_config.database.create_engine(poolclass=pool.NullPool)
-    with connectable.connect() as connection:
-        context.configure(
-            connection=connection,
-            target_metadata=target_metadata,
-            include_object=include_object,
-        )
-        with context.begin_transaction():
-            context.run_migrations()
+    engines = {}
+    for name, db_config in acoustid_config.databases.databases.items():
+        engines[name] = rec = {}
+        rec["engine"] = db_config.create_engine(poolclass=pool.NullPool)
+    for name, rec in engines.items():
+        engine = rec["engine"]
+        rec["connection"] = conn = engine.connect()
+        if use_two_phase_commit:
+            rec["transaction"] = conn.begin_twophase()
+        else:
+            rec["transaction"] = conn.begin()
+    try:
+        for name, rec in engines.items():
+            logger.info("Migrating database %s" % name)
+            context.configure(
+                connection=rec["connection"],
+                upgrade_token="%s_upgrades" % name,
+                downgrade_token="%s_downgrades" % name,
+                target_metadata=target_metadata,
+                include_object=include_object(name),
+            )
+            context.run_migrations(engine_name=name)
+        if use_two_phase_commit:
+            for rec in engines.values():
+                rec["transaction"].prepare()
+        for rec in engines.values():
+            rec["transaction"].commit()
+    except:
+        for rec in engines.values():
+            rec["transaction"].rollback()
+        raise
+    finally:
+        for rec in engines.values():
+            rec["connection"].close()
 if context.is_offline_mode():
...
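The online path above is the standard two-phase pattern: every database prepares before any of them commits, so a failure on either side rolls both back. Stripped to its core, under the assumption of two hypothetical URLs and the SQLAlchemy 1.x begin_twophase API:

import sqlalchemy

urls = ['postgresql:///acoustid', 'postgresql:///acoustid_slow']  # hypothetical URLs
conns = [sqlalchemy.create_engine(url).connect() for url in urls]
txns = [conn.begin_twophase() for conn in conns]
try:
    # ... run per-database work on each connection ...
    for txn in txns:
        txn.prepare()  # phase 1: each database persists the pending commit
    for txn in txns:
        txn.commit()   # phase 2: committing everywhere is now safe
except Exception:
    for txn in txns:
        txn.rollback()
    raise
finally:
    for conn in conns:
        conn.close()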
"""${message} <%!
import re
%>"""${message}
Revision ID: ${up_revision} Revision ID: ${up_revision}
Revises: ${down_revision | comma,n} Revises: ${down_revision | comma,n}
Create Date: ${create_date} Create Date: ${create_date}
""" """
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
revision = ${repr(up_revision)} revision = ${repr(up_revision)}
...@@ -12,13 +18,20 @@ down_revision = ${repr(down_revision)} ...@@ -12,13 +18,20 @@ down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)} branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)} depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade(): def upgrade(engine_name):
${upgrades if upgrades else "pass"} globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
% for db_name in re.split(r',\s*', config.get_main_option("databases")):
def upgrade_${db_name}():
${context.get("%s_upgrades" % db_name, "pass")}
def downgrade(): def downgrade_${db_name}():
${downgrades if downgrades else "pass"} ${context.get("%s_downgrades" % db_name, "pass")}
% endfor
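With databases = default, slow in alembic.ini, the template above renders every new revision as a per-database dispatcher. An illustrative rendering of the generated skeleton (matching the converted migrations below, not literal MR output):

def upgrade(engine_name):
    globals()["upgrade_%s" % engine_name]()

def downgrade(engine_name):
    globals()["downgrade_%s" % engine_name]()

def upgrade_default():
    pass

def downgrade_default():
    pass

def upgrade_slow():
    pass

def downgrade_slow():
    pass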
@@ -16,7 +16,15 @@ from alembic import op
 import sqlalchemy as sa
-def upgrade():
+def upgrade(engine_name):
+    globals()["upgrade_%s" % engine_name]()
+def downgrade(engine_name):
+    globals()["downgrade_%s" % engine_name]()
+def upgrade_default():
     op.create_foreign_key(op.f('account_google_fk_account_id'), 'account_google', 'account', ['account_id'], ['id'])
     op.create_foreign_key(op.f('application_fk_account_id'), 'application', 'account', ['account_id'], ['id'])
     op.create_foreign_key(op.f('stats_lookups_fk_application_id'), 'stats_lookups', 'application', ['application_id'], ['id'])
@@ -46,7 +54,7 @@ def upgrade():
     op.alter_column('recording_acoustid', 'created', nullable=False)
-def downgrade():
+def downgrade_default():
     op.alter_column('recording_acoustid', 'created', nullable=True)
     op.alter_column('track_foreignid_source', 'created', nullable=True)
     op.alter_column('track_foreignid', 'created', nullable=True)
@@ -74,3 +82,11 @@ def downgrade():
     op.drop_constraint(op.f('stats_lookups_fk_application_id'), 'stats_lookups', type_='foreignkey')
     op.drop_constraint(op.f('application_fk_account_id'), 'application', type_='foreignkey')
     op.drop_constraint(op.f('account_google_fk_account_id'), 'account_google', type_='foreignkey')
+def upgrade_slow():
+    pass
+def downgrade_slow():
+    pass
"""submission result
Revision ID: 4581a68fa644
Revises: d52d50968cf7
Create Date: 2019-06-08 12:17:40.208947
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '4581a68fa644'
down_revision = 'd52d50968cf7'
branch_labels = None
depends_on = None
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_default():
pass
def downgrade_default():
pass
def upgrade_slow():
op.create_table('submission_result',
sa.Column('submission_id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.Column('application_id', sa.Integer(), nullable=False),
sa.Column('application_version', sa.String(), nullable=True),
sa.Column('fingerprint_id', sa.Integer(), nullable=False),
sa.Column('track_id', sa.Integer(), nullable=False),
sa.Column('meta_id', sa.Integer(), nullable=True),
sa.Column('mbid', postgresql.UUID(), nullable=True),
sa.Column('puid', postgresql.UUID(), nullable=True),
sa.Column('foreignid', sa.String(), nullable=True),
sa.PrimaryKeyConstraint('submission_id', name=op.f('submission_result_pkey'))
)
def downgrade_slow():
op.drop_table('submission_result')
@@ -16,7 +16,16 @@ from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql
-def upgrade():
+def upgrade(engine_name):
+    globals()["upgrade_%s" % engine_name]()
+def downgrade(engine_name):
+    globals()["downgrade_%s" % engine_name]()
+def upgrade_default():
     op.create_table('account_google',
         sa.Column('google_user_id', sa.String(), nullable=False),
         sa.Column('account_id', sa.Integer(), nullable=False),
@@ -356,7 +365,7 @@ def upgrade():
     )
-def downgrade():
+def downgrade_default():
     op.drop_table('track_puid_source')
     op.drop_table('track_meta_source')
     op.drop_index(op.f('track_mbid_source_idx_track_mbid_id'), table_name='track_mbid_source')
@@ -422,3 +431,11 @@ def downgrade():
     op.drop_table('account_stats_control')
     op.drop_index('account_google_idx_account_id', table_name='account_google')
     op.drop_table('account_google')
+def upgrade_slow():
+    pass
+def downgrade_slow():
+    pass
@@ -16,7 +16,15 @@ from alembic import op
 import sqlalchemy as sa
-def upgrade():
+def upgrade(engine_name):
+    globals()["upgrade_%s" % engine_name]()
+def downgrade(engine_name):
+    globals()["downgrade_%s" % engine_name]()
+def upgrade_default():
     op.drop_constraint(u'fingerprint_source_fk_submission_id', 'fingerprint_source', type_='foreignkey')
     op.drop_constraint(u'track_foreignid_source_fk_submission_id', 'track_foreignid_source', type_='foreignkey')
     op.drop_constraint(u'track_mbid_source_fk_submission_id', 'track_mbid_source', type_='foreignkey')
@@ -24,9 +32,17 @@ def upgrade():
     op.drop_constraint(u'track_puid_source_fk_submission_id', 'track_puid_source', type_='foreignkey')
-def downgrade():
+def downgrade_default():
     op.create_foreign_key(u'track_puid_source_fk_submission_id', 'track_puid_source', 'submission', ['submission_id'], ['id'])
     op.create_foreign_key(u'track_meta_source_fk_submission_id', 'track_meta_source', 'submission', ['submission_id'], ['id'])
     op.create_foreign_key(u'track_mbid_source_fk_submission_id', 'track_mbid_source', 'submission', ['submission_id'], ['id'])
     op.create_foreign_key(u'track_foreignid_source_fk_submission_id', 'track_foreignid_source', 'submission', ['submission_id'], ['id'])
     op.create_foreign_key(u'fingerprint_source_fk_submission_id', 'fingerprint_source', 'submission', ['submission_id'], ['id'])
+def upgrade_slow():
+    pass
+def downgrade_slow():
+    pass
"""move submissions
Revision ID: d52d50968cf7
Revises: ae7e1e5763ef
Create Date: 2019-06-08 11:04:53.563059
"""
import datetime
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import Sequence, CreateSequence, DropSequence
from dateutil.relativedelta import relativedelta
# revision identifiers, used by Alembic.
revision = 'd52d50968cf7'
down_revision = 'ae7e1e5763ef'
branch_labels = None
depends_on = None
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_default():
op.rename_table('submission', 'submission_old')
def downgrade_default():
op.rename_table('submission_old', 'submission')
def upgrade_slow():
op.execute(CreateSequence(Sequence('submission_id_seq')))
op.create_table('submission',
sa.Column('id', sa.Integer(), server_default=sa.text("nextval('submission_id_seq')"), nullable=False),
sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('handled', sa.Boolean(), server_default=sa.text('false'), nullable=True),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.Column('application_id', sa.Integer(), nullable=False),
sa.Column('application_version', sa.String(), nullable=True),
sa.Column('fingerprint', postgresql.ARRAY(sa.Integer()), nullable=False),
sa.Column('duration', sa.Integer(), nullable=False),
sa.Column('bitrate', sa.Integer(), nullable=True),
sa.Column('format', sa.String(), nullable=True),
sa.Column('mbid', postgresql.UUID(), nullable=True),
sa.Column('puid', postgresql.UUID(), nullable=True),
sa.Column('foreignid', sa.String(), nullable=True),
sa.Column('track', sa.String(), nullable=True),
sa.Column('artist', sa.String(), nullable=True),
sa.Column('album', sa.String(), nullable=True),
sa.Column('album_artist', sa.String(), nullable=True),
sa.Column('track_no', sa.Integer(), nullable=True),
sa.Column('disc_no', sa.Integer(), nullable=True),
sa.Column('year', sa.Integer(), nullable=True),
postgresql_partition_by='RANGE (created)',
)
one_month = relativedelta(months=1)
partitions_from = datetime.date(2010, 1, 1)
partitions_to = datetime.date.today().replace(month=1, day=1) + relativedelta(months=12)
range_from = partitions_from
while range_from < partitions_to:
range_to = range_from + one_month
op.execute("""
CREATE TABLE IF NOT EXISTS submission_y{range_from.year:04d}m{range_to.month:02d}
PARTITION OF submission
FOR VALUES FROM ('{range_from}') TO ('{range_to}');
ALTER TABLE submission_y{range_from.year:04d}m{range_to.month:02d}
ADD PRIMARY KEY (id);
CREATE INDEX submission_y{range_from.year:04d}m{range_to.month:02d}_idx_handled
ON submission_y{range_from.year:04d}m{range_to.month:02d} (handled);
""".format(range_from=range_from, range_to=range_to))
range_from = range_to
def downgrade_slow():
op.drop_table('submission')
op.execute(DropSequence(Sequence('submission_id_seq')))
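The loop above pre-creates one partition per month from 2010 through the end of next year; note that the partition names pair range_from.year with range_to.month, exactly as written in the migration. A standalone sketch of the names and bounds it generates:

import datetime
from dateutil.relativedelta import relativedelta

range_from = datetime.date(2010, 1, 1)
for _ in range(3):
    range_to = range_from + relativedelta(months=1)
    print('submission_y{:04d}m{:02d}: [{} .. {})'.format(
        range_from.year, range_to.month, range_from, range_to))
    range_from = range_to
# submission_y2010m02: [2010-01-01 .. 2010-02-01)
# submission_y2010m03: [2010-02-01 .. 2010-03-01)
# submission_y2010m04: [2010-03-01 .. 2010-04-01)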
@@ -16,11 +16,27 @@ from alembic import op
 import sqlalchemy as sa
-def upgrade():
+def upgrade(engine_name):
+    globals()["upgrade_%s" % engine_name]()
+def downgrade(engine_name):
+    globals()["downgrade_%s" % engine_name]()
+def upgrade_default():
     op.add_column('account',
         sa.Column('is_admin', sa.Boolean(), server_default=sa.text('false'), nullable=False)
     )
-def downgrade():
+def downgrade_default():
     op.drop_column('account', 'is_admin')
+def upgrade_slow():
+    pass
+def downgrade_slow():
+    pass
@@ -6,8 +6,14 @@ services:
     ports:
       - "127.0.0.1:6379:6379"
+  index:
+    image: quay.io/acoustid/acoustid-index:master
+    ports:
+      - "127.0.0.1:6080:6080"
   postgres:
     image: quay.io/acoustid/postgresql:master
+    command: ["-c", "max_connections=100", "-c", "max_prepared_transactions=110"]
     ports:
       - "127.0.0.1:5432:5432"
     volumes:
...
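PostgreSQL ships with max_prepared_transactions=0, which disables PREPARE TRANSACTION entirely, so the command flag above is what makes the two-phase commit path usable (110 leaves headroom over the 100 connections). A quick sanity check, assuming the compose setup above and a hypothetical DSN with the test credentials:

import sqlalchemy

engine = sqlalchemy.create_engine('postgresql://acoustid:acoustid@127.0.0.1:5432/acoustid')  # hypothetical DSN
with engine.connect() as conn:
    print(conn.execute(sqlalchemy.text('SHOW max_prepared_transactions')).scalar())  # expect 110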
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.

from nose.tools import assert_equals
from acoustid.api import serialize_response

def test_serialize_json():
    data = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    resp = serialize_response(data, 'json')
    assert_equals('application/json; charset=UTF-8', resp.content_type)
    expected = b'''{"artists": [{"cities": ["Paris", "Lyon"], "name": "Jean Michel Jarre", "year": 1948}], "status": "ok"}'''
    assert_equals(expected, resp.data)

def test_serialize_jsonp():
    data = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    resp = serialize_response(data, 'jsonp:getData')
    assert_equals('application/javascript; charset=UTF-8', resp.content_type)
    expected = b'''getData({"artists": [{"cities": ["Paris", "Lyon"], "name": "Jean Michel Jarre", "year": 1948}], "status": "ok"})'''
    assert_equals(expected, resp.data)

def test_serialize_xml():
    data = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    resp = serialize_response(data, 'xml')
    assert_equals('text/xml; charset=UTF-8', resp.content_type)
    expected = (
        '''<?xml version='1.0' encoding='UTF-8'?>\n<response><status>ok</status><artists><artist><cities><city>Paris</city><city>Lyon'''
        '''</city></cities><name>Jean Michel Jarre</name><year>1948</year></artist></artists></response>'''
    )
    assert_equals(expected, resp.data)

def test_serialize_xml_attribute():
    data = {'@status': 'ok'}
    resp = serialize_response(data, 'xml')
    assert_equals('text/xml; charset=UTF-8', resp.content_type)
    expected = '''<?xml version='1.0' encoding='UTF-8'?>\n<response status="ok" />'''
    assert_equals(expected, resp.data)
from nose.tools import assert_equal
from acoustid.web import db
from tests import make_web_application

app = None

def setup():
    global app
    app = make_web_application()
    app.config['TESTING'] = True

def test_track_page():
    client = app.test_client()
    rv = client.get('/track/eb31d1c3-950e-468b-9e36-e46fa75b1291')
    assert_equal(rv.status_code, 200)
    assert_equal(rv.data.count('Custom Track'), 1)
    assert_equal(rv.data.count('Custom Artist'), 1)
    assert not db.session.registry.has()
@@ -4,4 +4,26 @@
 #
 # pip-compile --output-file requirements_dev.txt requirements_dev.in
 #
-nose==1.3.7
+atomicwrites==1.3.0 # via pytest
+attrs==19.1.0 # via pytest
+backports.functools-lru-cache==1.5 # via soupsieve
+beautifulsoup4==4.7.1 # via webtest
+configparser==3.7.4 # via importlib-metadata
+contextlib2==0.5.5 # via importlib-metadata
+funcsigs==1.0.2 # via pytest
+importlib-metadata==0.17 # via pluggy, pytest
+more-itertools==5.0.0 # via pytest
+packaging==19.0 # via pytest
+pathlib2==2.3.3 # via importlib-metadata, pytest
+pluggy==0.12.0 # via pytest
+py==1.8.0 # via pytest
+pyparsing==2.4.0 # via packaging
+pytest==4.6.2
+scandir==1.10.0 # via pathlib2
+six==1.12.0 # via more-itertools, packaging, pathlib2, pytest, webtest
+soupsieve==1.9.1 # via beautifulsoup4
+waitress==1.3.0 # via webtest
+wcwidth==0.1.7 # via pytest
+webob==1.8.5 # via webtest
+webtest==2.0.33
+zipp==0.5.1 # via importlib-metadata
@@ -160,7 +160,7 @@ def disable_track_mbid(db, track_mbid, note):
 def main(script, opts, args):
-    db = DatabaseContext(script.engine)
+    db = DatabaseContext(script.db)
     min_track_id = db.session.query(sql.func.min(Track.id)).scalar()
     max_track_id = db.session.query(sql.func.max(Track.id)).scalar()
...
@@ -8,7 +8,7 @@ import os
 def main(script, opts, args):
-    os.execlp('psql', 'psql', *(script.config.database.create_psql_args() + args))
+    os.execlp('psql', 'psql', *(script.config.databases.databases['default'].create_psql_args() + args))
 run_script(main)
import os
import pytest
from acoustid.config import str_to_bool
from acoustid.tables import metadata
from acoustid.data.fingerprint import update_fingerprint_index
from tests.data import create_sample_data

RECREATE_DB = True

def get_tables_for_bind(metadata, bind_key):
    tables = []
    for table in metadata.sorted_tables:
        if table.info.get('bind_key', 'default') == bind_key:
            tables.append(table)
    return tables

def create_all(metadata, engines):
    for bind_key, engine in engines.items():
        tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as conn:
            with conn.begin():
                metadata.create_all(bind=conn, tables=tables)

def drop_all(metadata, engines):
    for bind_key, engine in engines.items():
        tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as conn:
            with conn.begin():
                metadata.drop_all(bind=conn, tables=tables)

def delete_from_all_tables(metadata, engines):
    for bind_key, engine in engines.items():
        tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as conn:
            with conn.begin():
                for table in reversed(tables):
                    conn.execute(table.delete())

def delete_shutdown_file(config):
    if os.path.exists(config.website.shutdown_file_path):
        os.remove(config.website.shutdown_file_path)

def prepare_database(script):
    delete_from_all_tables(metadata, script.db_engines)
    with script.context() as ctx:
        create_sample_data(ctx)
        ctx.db.flush()
        update_fingerprint_index(ctx.db.connection(), ctx.index)
        ctx.db.commit()

@pytest.fixture(scope='session')
def config_path():
    return os.path.dirname(os.path.abspath(__file__)) + '/../acoustid-test.conf'

@pytest.fixture(scope='session')
def script_global(config_path):
    from acoustid.script import Script
    script = Script(config_path, tests=True)
    recreate_db = str_to_bool(os.environ.get('ACOUSTID_TEST_RECREATE', ''))
    if recreate_db:
        drop_all(metadata, script.db_engines)
    create_all(metadata, script.db_engines)
    prepare_database(script)