...
 
Commits (16)
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: https://acoustid.org/donate
......@@ -20,12 +20,19 @@ test:
- redis
- name: quay.io/acoustid/postgresql:master
alias: postgres
- name: quay.io/acoustid/acoustid-index:master
alias: acoustid-index
variables:
POSTGRES_USER: acoustid
POSTGRES_PASSWORD: acoustid
POSTGRES_DB: acoustid_test
ACOUSTID_TEST_POSTGRES_HOST: postgres
ACOUSTID_TEST_DB_TWO_PHASE_COMMIT: 0
ACOUSTID_TEST_DB_MAIN_HOST: postgres
ACOUSTID_TEST_DB_MAIN_NAME: acoustid_test
ACOUSTID_TEST_DB_IMPORT_HOST: postgres
ACOUSTID_TEST_DB_IMPORT_NAME: acoustid_test
ACOUSTID_TEST_REDIS_HOST: redis
ACOUSTID_TEST_INDEX_HOST: acoustid-index
cache:
key: $CI_JOB_NAME-$CI_COMMIT_REF_SLUG
paths:
......
[database]
two_phase_commit=yes
[database:main]
host=127.0.0.1
port=5432
user=acoustid
name=acoustid_test
password=acoustid
[database:import]
host=127.0.0.1
port=5432
user=acoustid
name=acoustid_import_test
password=acoustid
[logging]
level=WARNING
level.sqlalchemy=WARNING
syslog=yes
syslog_facility=local1
[index]
host=127.0.0.1
port=6080
[redis]
host=127.0.0.1
port=6379
......
[database]
two_phase_commit=false
[database:main]
user=acoustid
password=acoustid
name=acoustid
host=localhost
port=5432
[database:import]
user=acoustid
password=acoustid
name=acoustid_import
host=localhost
port=5432
[database:main_master]
user=acoustid
superuser=postgres
password=acoustid
name=acoustid
host=localhost
port=5432
[database:import_master]
user=acoustid
password=acoustid
name=acoustid_import
host=localhost
port=5432
[cluster]
role=master
base_master_url=https://api.acoustid.org/
......
This diff is collapsed.
......@@ -18,8 +18,10 @@ def read_env_item(obj, key, name, convert=None):
value = None
if name in os.environ:
value = os.environ[name]
if name + '_FILE' in os.environ:
logger.info('Reading config value from environment variable %s', name)
elif name + '_FILE' in os.environ:
value = open(os.environ[name + '_FILE']).read().strip()
logger.info('Reading config value from environment variable %s', name + '_FILE')
if value is not None:
if convert is not None:
value = convert(value)
......@@ -39,6 +41,42 @@ class BaseConfig(object):
pass
class DatabasesConfig(BaseConfig):
    """Configuration for the full set of databases used by the server.

    Holds one DatabaseConfig per (mode, name) pair, where mode is
    'local' or 'master' and name is 'main' or 'import'.
    """

    def __init__(self):
        # 'local' connects to the nearby server, 'master' to the primary.
        self.modes = {'local', 'master'}
        self.names = {'main', 'import'}
        self.databases = {}
        for mode in self.modes:
            self.databases[mode] = {}
            for name in self.names:
                self.databases[mode][name] = DatabaseConfig()
        self.use_two_phase_commit = False

    def create_engines(self, **kwargs):
        """Create one SQLAlchemy engine per database for the given mode.

        The optional keyword argument 'mode' selects 'local' (default) or
        'master'; remaining kwargs are forwarded to each
        DatabaseConfig.create_engine call.

        Returns a dict mapping database name ('main', 'import') to engine.
        """
        mode = kwargs.pop('mode', 'local')
        engines = {}
        for name, db_config in self.databases[mode].items():
            engines[name] = db_config.create_engine(**kwargs)
        return engines

    def read_section(self, parser, section):
        """Read settings from '<section>' and '<section>:<name>[_<mode>]' sections."""
        if parser.has_option(section, 'two_phase_commit'):
            self.use_two_phase_commit = parser.getboolean(section, 'two_phase_commit')
        for mode, db_configs in self.databases.items():
            for name, sub_config in db_configs.items():
                # Local mode has no suffix: 'database:main' vs 'database:main_master'.
                sub_section = '{}:{}{}'.format(section, name, '_' + mode if mode != 'local' else '')
                if parser.has_section(sub_section):
                    sub_config.read_section(parser, sub_section)

    def read_env(self, prefix):
        """Read settings from environment variables starting with the given prefix."""
        read_env_item(self, 'use_two_phase_commit', prefix + 'TWO_PHASE_COMMIT', convert=str_to_bool)
        for mode, db_configs in self.databases.items():
            for name, sub_config in db_configs.items():
                # e.g. prefix 'ACOUSTID_DB_' -> 'ACOUSTID_DB_MAIN_' (local)
                # or 'ACOUSTID_DB_MAIN_MASTER_' (master).
                sub_prefix = prefix + name.upper() + '_' + (mode.upper() + '_' if mode != 'local' else '')
                sub_config.read_env(sub_prefix)
class DatabaseConfig(BaseConfig):
def __init__(self):
......@@ -103,21 +141,21 @@ class DatabaseConfig(BaseConfig):
if parser.has_option(section, 'password'):
self.password = parser.get(section, 'password')
if parser.has_option(section, 'pool_size'):
self.password = parser.getint(section, 'pool_size')
self.pool_size = parser.getint(section, 'pool_size')
if parser.has_option(section, 'pool_recycle'):
self.password = parser.getint(section, 'pool_recycle')
self.pool_recycle = parser.getint(section, 'pool_recycle')
if parser.has_option(section, 'pool_pre_ping'):
self.password = parser.getboolean(section, 'pool_pre_ping')
self.pool_pre_ping = parser.getboolean(section, 'pool_pre_ping')
def read_env(self, prefix):
read_env_item(self, 'name', prefix + 'POSTGRES_DB')
read_env_item(self, 'host', prefix + 'POSTGRES_HOST')
read_env_item(self, 'port', prefix + 'POSTGRES_PORT', convert=int)
read_env_item(self, 'user', prefix + 'POSTGRES_USER')
read_env_item(self, 'password', prefix + 'POSTGRES_PASSWORD')
read_env_item(self, 'pool_size', prefix + 'POSTGRES_POOL_SIZE', convert=int)
read_env_item(self, 'pool_recycle', prefix + 'POSTGRES_POOL_RECYCLE', convert=int)
read_env_item(self, 'pool_pre_ping', prefix + 'POSTGRES_POOL_PRE_PING', convert=str_to_bool)
read_env_item(self, 'name', prefix + 'NAME')
read_env_item(self, 'host', prefix + 'HOST')
read_env_item(self, 'port', prefix + 'PORT', convert=int)
read_env_item(self, 'user', prefix + 'USER')
read_env_item(self, 'password', prefix + 'PASSWORD')
read_env_item(self, 'pool_size', prefix + 'POOL_SIZE', convert=int)
read_env_item(self, 'pool_recycle', prefix + 'POOL_RECYCLE', convert=int)
read_env_item(self, 'pool_pre_ping', prefix + 'POOL_PRE_PING', convert=str_to_bool)
class IndexConfig(BaseConfig):
......@@ -342,7 +380,7 @@ class RateLimiterConfig(BaseConfig):
class Config(object):
def __init__(self):
self.database = DatabaseConfig()
self.databases = DatabasesConfig()
self.logging = LoggingConfig()
self.website = WebSiteConfig()
self.index = IndexConfig()
......@@ -357,7 +395,7 @@ class Config(object):
logger.info("Loading configuration file %s", path)
parser = ConfigParser.RawConfigParser()
parser.read(path)
self.database.read(parser, 'database')
self.databases.read(parser, 'database')
self.logging.read(parser, 'logging')
self.website.read(parser, 'website')
self.index.read(parser, 'index')
......@@ -373,7 +411,7 @@ class Config(object):
prefix = 'ACOUSTID_TEST_'
else:
prefix = 'ACOUSTID_'
self.database.read_env(prefix)
self.databases.read_env(prefix + 'DB_')
self.logging.read_env(prefix)
self.website.read_env(prefix)
self.index.read_env(prefix)
......
......@@ -21,3 +21,6 @@ FINGERPRINT_MAX_LENGTH_DIFF = 7
FINGERPRINT_MAX_ALLOWED_LENGTH_DIFF = 30
MAX_REQUESTS_PER_SECOND = 3
MAX_FOREIGNID_NAMESPACE_LENGTH = 10
MAX_FOREIGNID_VALUE_LENGTH = 64
......@@ -28,6 +28,7 @@ def insert_submission(conn, data):
'source_id': data.get('source_id'),
'format_id': data.get('format_id'),
'meta_id': data.get('meta_id'),
'foreignid': data.get('foreignid'),
'foreignid_id': data.get('foreignid_id'),
})
id = conn.execute(insert_stmt).inserted_primary_key[0]
......
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import Session, sessionmaker
from acoustid.tables import metadata
Session = sessionmaker()
class RoutingSession(Session):
    """SQLAlchemy session that keeps a registry of all configured engines.

    The extra 'engines' keyword argument (a dict mapping database name to
    engine) is stripped before delegating to the base Session constructor.
    """

    def __init__(self, *args, **kwargs):
        self._engines = kwargs.pop('engines', {})
        super(RoutingSession, self).__init__(*args, **kwargs)

    def get_bind(self, mapper=None, clause=None):
        # Bind resolution is currently handled entirely by the base class
        # via the 'binds' map passed to the session factory (see
        # get_bind_args); this override only keeps the hook in place.
        return super(RoutingSession, self).get_bind(mapper, clause)
Session = sessionmaker(class_=RoutingSession)
def get_bind_args(engines):
    """Build the 'bind'/'binds' keyword arguments for a session factory.

    Tables whose info dict carries a non-default 'bind_key' are routed to
    the matching engine; everything else falls through to the main engine.
    """
    binds = {
        table: engines[table.info.get('bind_key', 'main')]
        for table in metadata.sorted_tables
        if table.info.get('bind_key', 'main') != 'main'
    }
    return {'bind': engines['main'], 'binds': binds}
def get_session_args(script):
    """Build keyword arguments for creating a Session from a Script.

    Combines the two-phase-commit setting, the engine registry, and the
    per-table bind map derived from the table metadata.
    """
    kwargs = {
        'twophase': script.config.databases.use_two_phase_commit,
        'engines': script.db_engines,
    }
    kwargs.update(get_bind_args(script.db_engines))
    return kwargs
class DatabaseContext(object):
def __init__(self, bind):
self.session = Session(bind=bind)
def __init__(self, script):
self.session = Session(**get_session_args(script))
def __enter__(self):
return self
......
......@@ -38,6 +38,9 @@ class IndexClient(object):
self._buffer = ''
self._connect()
def __str__(self):
return '{}:{}'.format(self.host, self.port)
def __repr__(self):
return '<%s(%s, %s) instance at %s>' % (self.__class__.__name__,
self.host, self.port, hex(id(self)))
......@@ -164,6 +167,9 @@ class IndexClientWrapper(object):
self.get_attribute = self._client.get_attribute
self.set_attribute = self._client.set_attribute
def __str__(self):
return str(self._client)
def close(self):
if self._client.in_transaction:
self._client.rollback()
......
......@@ -79,3 +79,11 @@ class StatsLookups(Base):
__table__ = tables.stats_lookups
application = relationship('Application')
class Submission(Base):
__table__ = tables.submission
class SubmissionResult(Base):
__table__ = tables.submission_result
......@@ -11,11 +11,26 @@ from optparse import OptionParser
from acoustid.config import Config
from acoustid.indexclient import IndexClientPool
from acoustid.utils import LocalSysLogHandler
from acoustid.db import DatabaseContext
from acoustid._release import GIT_RELEASE
logger = logging.getLogger(__name__)
class ScriptContext(object):
    """Bundle of per-run resources (database session, redis, index client).

    Usable as a context manager; on exit only the database session is
    closed, since the redis and index connections are pooled by the Script.
    """

    def __init__(self, db, redis, index):
        self.db = db
        self.redis = redis
        self.index = index

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.db.close()
class Script(object):
def __init__(self, config_path, tests=False):
......@@ -23,25 +38,30 @@ class Script(object):
if config_path:
self.config.read(config_path)
self.config.read_env(tests=tests)
if tests:
self.engine = sqlalchemy.create_engine(self.config.database.create_url(),
poolclass=sqlalchemy.pool.AssertionPool)
else:
self.engine = sqlalchemy.create_engine(self.config.database.create_url())
create_engine_kwargs = {'poolclass': sqlalchemy.pool.AssertionPool} if tests else {}
self.db_engines = self.config.databases.create_engines(**create_engine_kwargs)
if not self.config.index.host:
self.index = None
else:
self.index = IndexClientPool(host=self.config.index.host,
port=self.config.index.port,
recycle=60)
if not self.config.redis.host:
self.redis = None
else:
self.redis = Redis(host=self.config.redis.host,
port=self.config.redis.port)
self._console_logging_configured = False
self.setup_logging()
@property
def engine(self):
    # Backwards-compatible alias: code written against the single-database
    # configuration keeps working by getting the 'main' engine.
    return self.db_engines['main']
def setup_logging(self):
for logger_name, level in sorted(self.config.logging.levels.items()):
logging.getLogger(logger_name).setLevel(level)
......@@ -66,6 +86,10 @@ class Script(object):
def setup_sentry(self):
sentry_sdk.init(self.config.sentry.script_dsn, release=GIT_RELEASE)
def context(self):
    """Create a ScriptContext with a session bound to this script's engines."""
    db = DatabaseContext(self).session
    return ScriptContext(db=db, redis=self.redis, index=self.index)
def run_script(func, option_cb=None, master_only=False):
parser = OptionParser()
......
......@@ -9,7 +9,7 @@ from cStringIO import StringIO
from werkzeug.exceptions import HTTPException
from werkzeug.routing import Map, Rule, Submount
from werkzeug.wrappers import Request
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.middleware.proxy_fix import ProxyFix
from acoustid.script import Script
from acoustid._release import GIT_RELEASE
import acoustid.api.v1
......@@ -53,12 +53,15 @@ admin_url_rules = [
class Server(Script):
def __init__(self, config_path):
super(Server, self).__init__(config_path)
def __init__(self, config_path, **kwargs):
super(Server, self).__init__(config_path, **kwargs)
url_rules = api_url_rules + admin_url_rules
self.url_map = Map(url_rules, strict_slashes=False)
def __call__(self, environ, start_response):
return self.wsgi_app(environ, start_response)
def wsgi_app(self, environ, start_response):
urls = self.url_map.bind_to_environ(environ)
handler = None
try:
......@@ -112,16 +115,16 @@ def add_cors_headers(app):
return wrapped_app
def make_application(config_path):
def make_application(config_path, **kwargs):
"""Construct a WSGI application for the AcoustID server
:param config_path: path to the server configuration file
"""
server = Server(config_path)
server = Server(config_path, **kwargs)
server.setup_sentry()
app = GzipRequestMiddleware(server)
app = ProxyFix(app)
app = SentryWsgiMiddleware(app)
app = replace_double_slashes(app)
app = add_cors_headers(app)
return server, app
server.wsgi_app = GzipRequestMiddleware(server.wsgi_app)
server.wsgi_app = ProxyFix(server.wsgi_app)
server.wsgi_app = SentryWsgiMiddleware(server.wsgi_app)
server.wsgi_app = replace_double_slashes(server.wsgi_app)
server.wsgi_app = add_cors_headers(server.wsgi_app)
return server
import sqlalchemy
import sqlalchemy.event
from sqlalchemy import (
MetaData, Table, Column, Index,
MetaData, Table, Column, Index, Sequence,
ForeignKey, CheckConstraint,
Integer, String, DateTime, Boolean, Date, Text, SmallInteger, BigInteger, CHAR,
DDL, sql,
......@@ -103,7 +104,7 @@ source = Table('source', metadata,
Index('source_idx_uniq', 'application_id', 'account_id', 'version', unique=True),
)
submission = Table('submission', metadata,
submission_old = Table('submission_old', metadata,
Column('id', Integer, primary_key=True),
Column('fingerprint', ARRAY(Integer), nullable=False),
Column('length', SmallInteger, CheckConstraint('length>0'), nullable=False),
......@@ -118,7 +119,55 @@ submission = Table('submission', metadata,
Column('foreignid_id', Integer, ForeignKey('foreignid.id')),
)
Index('submission_idx_handled', submission.c.id, postgresql_where=submission.c.handled == False) # noqa: E712
Index('submission_idx_handled', submission_old.c.id, postgresql_where=submission_old.c.handled == False) # noqa: E712
submission_id_seq = Sequence('submission_id_seq', metadata=metadata)
submission = Table('submission', metadata,
Column('id', Integer, submission_id_seq, server_default=submission_id_seq.next_value(), primary_key=True),
Column('created', DateTime(timezone=True), server_default=sql.func.current_timestamp(), nullable=False),
Column('handled', Boolean, default=False, server_default=sql.false()),
Column('account_id', Integer, nullable=False), # ForeignKey('account.id')
Column('application_id', Integer, nullable=False), # ForeignKey('application.id')
Column('application_version', String),
Column('fingerprint', ARRAY(Integer), nullable=False),
Column('duration', Integer, CheckConstraint('duration>0'), nullable=False),
Column('bitrate', Integer, CheckConstraint('bitrate>0')),
Column('format', String),
Column('mbid', UUID),
Column('puid', UUID),
Column('foreignid', String),
Column('track', String),
Column('artist', String),
Column('album', String),
Column('album_artist', String),
Column('track_no', Integer),
Column('disc_no', Integer),
Column('year', Integer),
info={'bind_key': 'import'},
)
submission_result = Table('submission_result', metadata,
Column('submission_id', Integer, primary_key=True, autoincrement=False),
Column('created', DateTime(timezone=True), server_default=sql.func.current_timestamp(), nullable=False),
Column('account_id', Integer, nullable=False), # ForeignKey('account.id')
Column('application_id', Integer, nullable=False), # ForeignKey('application.id')
Column('application_version', String),
Column('fingerprint_id', Integer, nullable=False), # ForeignKey('fingerprint.id')
Column('track_id', Integer, nullable=False), # ForeignKey('track.id')
Column('meta_id', Integer), # ForeignKey('meta.id')
Column('mbid', UUID),
Column('puid', UUID),
Column('foreignid', String),
info={'bind_key': 'import'},
)
stats = Table('stats', metadata,
Column('id', Integer, primary_key=True),
......@@ -193,6 +242,8 @@ fingerprint = Table('fingerprint', metadata,
Index('fingerprint_idx_track_id', 'track_id'),
)
fingerprint.add_is_dependent_on(track)
fingerprint_source = Table('fingerprint_source', metadata,
Column('id', Integer, primary_key=True),
Column('fingerprint_id', Integer, ForeignKey('fingerprint.id'), nullable=False),
......
......@@ -9,6 +9,7 @@ import datetime
import hmac
import base64
import six
from acoustid.const import MAX_FOREIGNID_NAMESPACE_LENGTH, MAX_FOREIGNID_VALUE_LENGTH
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urlencode
from logging import Handler
......@@ -54,7 +55,15 @@ def is_int(s):
def is_foreignid(s):
    """Check whether a string is a valid foreign ID of the form 'namespace:value'.

    The namespace must be lowercase alphanumeric, and both parts must fit
    within the configured maximum lengths.
    """
    match = re.match(r'^([0-9a-z]+):(.+)$', s)
    if match is None:
        return False
    namespace, value = match.groups()
    if len(namespace) > MAX_FOREIGNID_NAMESPACE_LENGTH:
        return False
    if len(value) > MAX_FOREIGNID_VALUE_LENGTH:
        return False
    return True
def singular(plural):
......
# Copyright (C) 2014 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from sqlalchemy.orm import sessionmaker
from acoustid.db import Session
class Database(object):
    """Holder for the web application's session factory and scoped session.

    The scoped session is attached later by make_application, once the
    engines are known (see acoustid.web.app).
    """

    def __init__(self):
        # Use the shared RoutingSession-based factory from acoustid.db
        # instead of creating an unrelated sessionmaker.
        self.session_factory = Session
        self.session = None
......
......@@ -5,9 +5,10 @@ import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
from flask import Flask, request, session
from flask.sessions import SecureCookieSessionInterface
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.middleware.proxy_fix import ProxyFix
from sqlalchemy.orm import scoped_session
from acoustid.script import Script
from acoustid.db import get_session_args
from acoustid.web import db
from acoustid.web.views.general import general_page
from acoustid.web.views.user import user_page
......@@ -36,6 +37,8 @@ def make_application(config_filename=None, tests=False):
GOOGLE_OAUTH_CLIENT_ID=config.website.google_oauth_client_id,
GOOGLE_OAUTH_CLIENT_SECRET=config.website.google_oauth_client_secret,
)
if tests:
app.config['TESTING'] = True
app.acoustid_script = script
app.acoustid_config = config
......@@ -74,6 +77,11 @@ def make_application(config_filename=None, tests=False):
except RuntimeError:
return 0
def get_test_request_scope():
return 0
session_scope_func = get_test_request_scope if tests else get_flask_request_scope
@app.teardown_request
def close_db_session(*args, **kwargs):
db.session.remove()
......@@ -88,8 +96,8 @@ def make_application(config_filename=None, tests=False):
from acoustid.api import get_health_response
return get_health_response(script, request)
db.session_factory.configure(bind=config.database.create_engine())
db.session = scoped_session(db.session_factory, scopefunc=get_flask_request_scope)
db.session_factory.configure(**get_session_args(script))
db.session = scoped_session(db.session_factory, scopefunc=session_scope_func)
app.register_blueprint(general_page)
app.register_blueprint(user_page)
......
......@@ -7,7 +7,11 @@ consider donating a small amount of money. Your donation will
be used to support further development of the project and to
make sure the service stays both reliable and free.
The easiest way to donate is via PayPal, simply click the button below. Thank you!
The best way to donate is using Patreon:
<a href="https://www.patreon.com/bePatron?u=21057764" data-patreon-widget-type="become-patron-button">Become a Patron!</a>
Alternatively, you can also donate using PayPal:
<form action="https://www.paypal.com/cgi-bin/webscr" method="post" target="_top">
<input type="hidden" name="cmd" value="_s-xclick">
......
......@@ -117,6 +117,7 @@
<script src="//ajax.googleapis.com/ajax/libs/jquery/{{ jquery_version }}/jquery.min.js"></script>
<script src="//maxcdn.bootstrapcdn.com/bootstrap/{{ bootstrap_version }}/js/bootstrap.min.js"></script>
<script async src="https://c6.patreon.com/becomePatronButton.bundle.js"></script>
{% block extra_js %}{% endblock %}
</body>
......
......@@ -33,7 +33,7 @@ def render_page(name, **context):
text = file.read().decode('utf8')
text = render_template_string(text, **context)
md = Markdown(extensions=['meta'])
md.treeprocessors["flask_links"] = MarkdownFlaskUrlProcessor(md)
md.treeprocessors.register(MarkdownFlaskUrlProcessor(md), 'flask_links', 50)
html = md.convert(text)
title = ' '.join(md.Meta.get('title', []))
return render_template('page.html', content=html, title=title)
......
......@@ -17,7 +17,7 @@ fi
echo "GIT_RELEASE = '$GIT_RELEASE'" > acoustid/_release.py
docker pull $IMAGE:$PREV_VERSION
docker pull $IMAGE:$PREV_VERSION || true
docker build --cache-from=$IMAGE:$PREV_VERSION -t $IMAGE:$VERSION .
docker push $IMAGE:$VERSION
......
CREATE DATABASE "acoustid";
CREATE DATABASE "acoustid_test";
CREATE DATABASE "acoustid_import";
CREATE DATABASE "acoustid_import_test";
\c acoustid
create extension intarray;
create extension pgcrypto;
......@@ -12,3 +15,15 @@ create extension intarray;
create extension pgcrypto;
create extension acoustid;
create extension cube;
\c acoustid_import
create extension intarray;
create extension pgcrypto;
create extension acoustid;
create extension cube;
\c acoustid_import_test
create extension intarray;
create extension pgcrypto;
create extension acoustid;
create extension cube;
......@@ -2,8 +2,10 @@
set -ex
/wait-for-it.sh $ACOUSTID_TEST_INDEX_HOST:6080
/wait-for-it.sh $ACOUSTID_TEST_REDIS_HOST:6379

# BUG FIX: the CI configuration defines ACOUSTID_TEST_DB_MAIN_HOST and
# ACOUSTID_TEST_DB_IMPORT_HOST; ACOUSTID_TEST_DB_DEFAULT_HOST and
# ACOUSTID_TEST_DB_SLOW_HOST are never set, so wait-for-it would be
# called with an empty host (":5432").
/wait-for-it.sh $ACOUSTID_TEST_DB_MAIN_HOST:5432
/wait-for-it.sh $ACOUSTID_TEST_DB_IMPORT_HOST:5432

export PIP_CACHE_DIR=`pwd`/pip-cache
......
......@@ -29,6 +29,8 @@ script_location = alembic
# are written from script.py.mako
# output_encoding = utf-8
databases = main, import
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
......
Generic single-database configuration.
\ No newline at end of file
AcoustID's multi-database configuration.
from __future__ import with_statement
import os
import logging
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig
config = context.config
fileConfig(config.config_file_name)
logger = logging.getLogger("alembic.env")
import acoustid.tables
target_metadata = acoustid.tables.metadata
......@@ -17,12 +19,25 @@ acoustid_config = acoustid.config.Config()
acoustid_config.read(acoustid_config_filename)
acoustid_config.read_env()
def include_object(obj, name, type, reflected, compare_to):
if type == "table" and obj.schema == "musicbrainz":
use_two_phase_commit = acoustid_config.databases.use_two_phase_commit
def include_object(db_name):
    """Build an Alembic ``include_object`` callback for one database.

    The callback excludes anything in the externally-managed 'musicbrainz'
    schema and anything whose 'bind_key' routes it to a different database
    than ``db_name`` (tables default to the 'main' database).
    """
    def inner(obj, name, obj_type, reflected, compare_to):
        if obj_type == "table":
            if obj.schema == "musicbrainz":
                return False
            bind_key = obj.info.get('bind_key', 'main')
            if bind_key != db_name:
                return False
        if obj_type == "column":
            if obj.table.schema == "musicbrainz":
                return False
            # Columns inherit their routing from the owning table.
            bind_key = obj.table.info.get('bind_key', 'main')
            if bind_key != db_name:
                return False
        return True
    return inner
def run_migrations_offline():
......@@ -37,13 +52,18 @@ def run_migrations_offline():
script output.
"""
url = acoustid_config.database.create_url()
for name, db_config in acoustid_config.databases.databases.items():
logger.info("Migrating database %s" % name)
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True,
include_object=include_object)
url=db_config.create_url(),
target_metadata=target_metadata,
literal_binds=True,
include_object=include_object(name),
)
with context.begin_transaction():
context.run_migrations()
context.run_migrations(engine_name=name)
def run_migrations_online():
......@@ -53,17 +73,45 @@ def run_migrations_online():
and associate a connection with the context.
"""
connectable = acoustid_config.database.create_engine(poolclass=pool.NullPool)
with connectable.connect() as connection:
engines = {}
for name, db_config in acoustid_config.databases.databases.items():
engines[name] = rec = {}
rec["engine"] = db_config.create_engine(poolclass=pool.NullPool)
for name, rec in engines.items():
engine = rec["engine"]
rec["connection"] = conn = engine.connect()
if use_two_phase_commit:
rec["transaction"] = conn.begin_twophase()
else:
rec["transaction"] = conn.begin()
try:
for name, rec in engines.items():
logger.info("Migrating database %s" % name)
context.configure(
connection=connection,
connection=rec["connection"],
upgrade_token="%s_upgrades" % name,
downgrade_token="%s_downgrades" % name,
target_metadata=target_metadata,
include_object=include_object,
include_object=include_object(name),
)
with context.begin_transaction():
context.run_migrations()
context.run_migrations(engine_name=name)
if use_two_phase_commit:
for rec in engines.values():
rec["transaction"].prepare()
for rec in engines.values():
rec["transaction"].commit()
except:
for rec in engines.values():
rec["transaction"].rollback()
raise
finally:
for rec in engines.values():
rec["connection"].close()
if context.is_offline_mode():
......
"""${message}
<%!
import re
%>"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
......@@ -12,13 +18,20 @@ down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
${upgrades if upgrades else "pass"}
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
% for db_name in re.split(r',\s*', config.get_main_option("databases")):
def upgrade_${db_name}():
${context.get("%s_upgrades" % db_name, "pass")}
def downgrade():
${downgrades if downgrades else "pass"}
def downgrade_${db_name}():
${context.get("%s_downgrades" % db_name, "pass")}
% endfor
......@@ -16,7 +16,15 @@ from alembic import op
import sqlalchemy as sa
def upgrade():
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_main():
op.create_foreign_key(op.f('account_google_fk_account_id'), 'account_google', 'account', ['account_id'], ['id'])
op.create_foreign_key(op.f('application_fk_account_id'), 'application', 'account', ['account_id'], ['id'])
op.create_foreign_key(op.f('stats_lookups_fk_application_id'), 'stats_lookups', 'application', ['application_id'], ['id'])
......@@ -46,7 +54,7 @@ def upgrade():
op.alter_column('recording_acoustid', 'created', nullable=False)
def downgrade():
def downgrade_main():
op.alter_column('recording_acoustid', 'created', nullable=True)
op.alter_column('track_foreignid_source', 'created', nullable=True)
op.alter_column('track_foreignid', 'created', nullable=True)
......@@ -74,3 +82,11 @@ def downgrade():
op.drop_constraint(op.f('stats_lookups_fk_application_id'), 'stats_lookups', type_='foreignkey')
op.drop_constraint(op.f('application_fk_account_id'), 'application', type_='foreignkey')
op.drop_constraint(op.f('account_google_fk_account_id'), 'account_google', type_='foreignkey')
def upgrade_import():
pass
def downgrade_import():
pass
"""submission result
Revision ID: 4581a68fa644
Revises: d52d50968cf7
Create Date: 2019-06-08 12:17:40.208947
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '4581a68fa644'
down_revision = 'd52d50968cf7'
branch_labels = None
depends_on = None
def upgrade(engine_name):
    # Dispatch to the per-database upgrade function (multi-database setup).
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    # Dispatch to the per-database downgrade function.
    globals()["downgrade_%s" % engine_name]()


def upgrade_main():
    # No changes in the main database for this revision.
    pass


def downgrade_main():
    pass


def upgrade_import():
    # Results of processed submissions, stored in the import database.
    op.create_table('submission_result',
        sa.Column('submission_id', sa.Integer(), autoincrement=False, nullable=False),
        # BUG FIX: 'server_main' is not a valid sa.Column keyword (artifact of
        # a bad find/replace); the intended keyword is 'server_default'.
        sa.Column('created', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        sa.Column('application_id', sa.Integer(), nullable=False),
        sa.Column('application_version', sa.String(), nullable=True),
        sa.Column('fingerprint_id', sa.Integer(), nullable=False),
        sa.Column('track_id', sa.Integer(), nullable=False),
        sa.Column('meta_id', sa.Integer(), nullable=True),
        sa.Column('mbid', postgresql.UUID(), nullable=True),
        sa.Column('puid', postgresql.UUID(), nullable=True),
        sa.Column('foreignid', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('submission_id', name=op.f('submission_result_pkey'))
    )


def downgrade_import():
    op.drop_table('submission_result')
This diff is collapsed.
......@@ -16,7 +16,15 @@ from alembic import op
import sqlalchemy as sa
def upgrade():
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_main():
op.drop_constraint(u'fingerprint_source_fk_submission_id', 'fingerprint_source', type_='foreignkey')
op.drop_constraint(u'track_foreignid_source_fk_submission_id', 'track_foreignid_source', type_='foreignkey')
op.drop_constraint(u'track_mbid_source_fk_submission_id', 'track_mbid_source', type_='foreignkey')
......@@ -24,9 +32,17 @@ def upgrade():
op.drop_constraint(u'track_puid_source_fk_submission_id', 'track_puid_source', type_='foreignkey')
def downgrade():
def downgrade_main():
op.create_foreign_key(u'track_puid_source_fk_submission_id', 'track_puid_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'track_meta_source_fk_submission_id', 'track_meta_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'track_mbid_source_fk_submission_id', 'track_mbid_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'track_foreignid_source_fk_submission_id', 'track_foreignid_source', 'submission', ['submission_id'], ['id'])
op.create_foreign_key(u'fingerprint_source_fk_submission_id', 'fingerprint_source', 'submission', ['submission_id'], ['id'])
def upgrade_import():
pass
def downgrade_import():
pass
"""move submissions
Revision ID: d52d50968cf7
Revises: ae7e1e5763ef
Create Date: 2019-06-08 11:04:53.563059
"""
import datetime
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import Sequence, CreateSequence, DropSequence
from dateutil.relativedelta import relativedelta
# revision identifiers, used by Alembic.
revision = 'd52d50968cf7'
down_revision = 'ae7e1e5763ef'
branch_labels = None
depends_on = None
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_main():
op.rename_table('submission', 'submission_old')
def downgrade_main():
op.rename_table('submission_old', 'submission')
def upgrade_import():
op.execute(CreateSequence(Sequence('submission_id_seq')))
op.create_table('submission',
sa.Column('id', sa.Integer(), server_main=sa.text("nextval('submission_id_seq')"), nullable=False),
sa.Column('created', sa.DateTime(timezone=True), server_main=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('handled', sa.Boolean(), server_main=sa.text('false'), nullable=True),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.Column('application_id', sa.Integer(), nullable=False),
sa.Column('application_version', sa.String(), nullable=True),
sa.Column('fingerprint', postgresql.ARRAY(sa.Integer()), nullable=False),
sa.Column('duration', sa.Integer(), nullable=False),
sa.Column('bitrate', sa.Integer(), nullable=True),
sa.Column('format', sa.String(), nullable=True),
sa.Column('mbid', postgresql.UUID(), nullable=True),
sa.Column('puid', postgresql.UUID(), nullable=True),
sa.Column('foreignid', sa.String(), nullable=True),
sa.Column('track', sa.String(), nullable=True),
sa.Column('artist', sa.String(), nullable=True),
sa.Column('album', sa.String(), nullable=True),
sa.Column('album_artist', sa.String(), nullable=True),
sa.Column('track_no', sa.Integer(), nullable=True),
sa.Column('disc_no', sa.Integer(), nullable=True),
sa.Column('year', sa.Integer(), nullable=True),
postgresql_partition_by='RANGE (created)',
)
one_month = relativedelta(months=1)
partitions_from = datetime.date(2010, 1, 1)
partitions_to = datetime.date.today().replace(month=1, day=1) + relativedelta(months=12)
range_from = partitions_from
while range_from < partitions_to:
range_to = range_from + one_month
op.execute("""
CREATE TABLE IF NOT EXISTS submission_y{range_from.year:04d}m{range_to.month:02d}
PARTITION OF submission
FOR VALUES FROM ('{range_from}') TO ('{range_to}');
ALTER TABLE submission_y{range_from.year:04d}m{range_to.month:02d}
ADD PRIMARY KEY (id);
CREATE INDEX submission_y{range_from.year:04d}m{range_to.month:02d}_idx_handled
ON submission_y{range_from.year:04d}m{range_to.month:02d} (handled);
""".format(range_from=range_from, range_to=range_to))
range_from = range_to
def downgrade_import():
    """Revert the import-database upgrade: drop the submission table,
    then the sequence that fed its id column."""
    op.drop_table('submission')
    submission_id_seq = Sequence('submission_id_seq')
    op.execute(DropSequence(submission_id_seq))
......@@ -16,11 +16,27 @@ from alembic import op
import sqlalchemy as sa
# NOTE(review): a stale, body-less `def upgrade():` header (diff residue)
# preceded these dispatchers and made the module a syntax error; removed.
def upgrade(engine_name):
    """Dispatch to the per-database upgrade function (upgrade_main / upgrade_import)."""
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    """Dispatch to the per-database downgrade function (downgrade_main / downgrade_import)."""
    globals()["downgrade_%s" % engine_name]()
def upgrade_main():
    """Add the account.is_admin flag; existing rows default to false."""
    # The extracted source contained both the old and new versions of the
    # Column line and the bogus keyword `server_main`; SQLAlchemy's Column
    # keyword is `server_default`.
    op.add_column('account',
        sa.Column('is_admin', sa.Boolean(), server_default=sa.text('false'), nullable=False)
    )
# NOTE(review): a stale, body-less `def downgrade():` header (diff residue)
# preceded this function; removed.
def downgrade_main():
    """Revert upgrade_main: drop the account.is_admin column."""
    op.drop_column('account', 'is_admin')
def upgrade_import():
    # This migration only touches the main database; nothing to do here.
    pass


def downgrade_import():
    # Nothing to revert in the import database.
    pass
......@@ -6,8 +6,14 @@ services:
ports:
- "127.0.0.1:6379:6379"
index:
image: quay.io/acoustid/acoustid-index:master
ports:
- "127.0.0.1:6080:6080"
postgres:
image: quay.io/acoustid/postgresql:master
command: ["-c", "max_connections=100", "-c", "max_prepared_transactions=110"]
ports:
- "127.0.0.1:5432:5432"
volumes:
......
This diff is collapsed.
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
from nose.tools import assert_equals
from acoustid.api import serialize_response
def test_serialize_json():
    """JSON responses carry the right content type and sorted-key body."""
    payload = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    response = serialize_response(payload, 'json')
    assert_equals('application/json; charset=UTF-8', response.content_type)
    assert_equals(
        b'''{"artists": [{"cities": ["Paris", "Lyon"], "name": "Jean Michel Jarre", "year": 1948}], "status": "ok"}''',
        response.data,
    )
def test_serialize_jsonp():
    """JSONP responses wrap the JSON body in the requested callback."""
    payload = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    response = serialize_response(payload, 'jsonp:getData')
    assert_equals('application/javascript; charset=UTF-8', response.content_type)
    assert_equals(
        b'''getData({"artists": [{"cities": ["Paris", "Lyon"], "name": "Jean Michel Jarre", "year": 1948}], "status": "ok"})''',
        response.data,
    )
def test_serialize_xml():
    """XML responses carry the right content type and element structure."""
    data = {'status': 'ok', 'artists': [{'name': 'Jean Michel Jarre', 'year': 1948, 'cities': ['Paris', 'Lyon']}]}
    resp = serialize_response(data, 'xml')
    assert_equals('text/xml; charset=UTF-8', resp.content_type)
    # resp.data is bytes; compare against bytes like the JSON tests do
    # (str literals here only worked on Python 2, where bytes is str).
    expected = (
        b'''<?xml version='1.0' encoding='UTF-8'?>\n<response><status>ok</status><artists><artist><cities><city>Paris</city><city>Lyon'''
        b'''</city></cities><name>Jean Michel Jarre</name><year>1948</year></artist></artists></response>'''
    )
    assert_equals(expected, resp.data)
def test_serialize_xml_attribute():
    """Keys prefixed with '@' become XML attributes on the response element."""
    data = {'@status': 'ok'}
    resp = serialize_response(data, 'xml')
    assert_equals('text/xml; charset=UTF-8', resp.content_type)
    # resp.data is bytes; compare against bytes like the JSON tests do
    # (str literals here only worked on Python 2, where bytes is str).
    expected = b'''<?xml version='1.0' encoding='UTF-8'?>\n<response status="ok" />'''
    assert_equals(expected, resp.data)
from nose.tools import assert_equal
from acoustid.web import db
from tests import make_web_application
# Shared application under test; created once per module in setup().
app = None


def setup():
    # nose module-level setup hook: build the web application once and
    # switch on TESTING mode for the test client.
    global app
    app = make_web_application()
    app.config['TESTING'] = True
def test_track_page():
    """The track page renders the sample track and its artist exactly once,
    and the request must not leave a scoped DB session behind."""
    response = app.test_client().get('/track/eb31d1c3-950e-468b-9e36-e46fa75b1291')
    assert_equal(response.status_code, 200)
    assert_equal(response.data.count('Custom Track'), 1)
    assert_equal(response.data.count('Custom Artist'), 1)
    assert not db.session.registry.has()
......@@ -4,4 +4,26 @@
#
# pip-compile --output-file requirements_dev.txt requirements_dev.in
#
nose==1.3.7
atomicwrites==1.3.0 # via pytest
attrs==19.1.0 # via pytest
backports.functools-lru-cache==1.5 # via soupsieve
beautifulsoup4==4.7.1 # via webtest
configparser==3.7.4 # via importlib-metadata
contextlib2==0.5.5 # via importlib-metadata
funcsigs==1.0.2 # via pytest
importlib-metadata==0.17 # via pluggy, pytest
more-itertools==5.0.0 # via pytest
packaging==19.0 # via pytest
pathlib2==2.3.3 # via importlib-metadata, pytest
pluggy==0.12.0 # via pytest
py==1.8.0 # via pytest
pyparsing==2.4.0 # via packaging
pytest==4.6.2
scandir==1.10.0 # via pathlib2
six==1.12.0 # via more-itertools, packaging, pathlib2, pytest, webtest
soupsieve==1.9.1 # via beautifulsoup4
waitress==1.3.0 # via webtest
wcwidth==0.1.7 # via pytest
webob==1.8.5 # via webtest
webtest==2.0.33
zipp==0.5.1 # via importlib-metadata
......@@ -160,7 +160,7 @@ def disable_track_mbid(db, track_mbid, note):
def main(script, opts, args):
db = DatabaseContext(script.engine)
db = DatabaseContext(script.db)
min_track_id = db.session.query(sql.func.min(Track.id)).scalar()
max_track_id = db.session.query(sql.func.max(Track.id)).scalar()
......
......@@ -8,7 +8,7 @@ import os
def main(script, opts, args):
    """Replace this process with psql connected to the main database,
    forwarding any extra command-line arguments."""
    # NOTE(review): the extracted source kept both the old call
    # (script.config.database.create_psql_args()) and the new one; since
    # execlp never returns, the second call was dead code.  Keep only the
    # current config API.
    os.execlp('psql', 'psql', *(script.config.databases.databases['main'].create_psql_args() + args))


run_script(main)
This diff is collapsed.
import os
import pytest
from acoustid.config import str_to_bool
from acoustid.tables import metadata
from acoustid.data.fingerprint import update_fingerprint_index
from tests.data import create_sample_data
RECREATE_DB = True
def get_tables_for_bind(metadata, bind_key):
    """Return the tables in *metadata* that belong to *bind_key*.

    A table's bind is taken from its info dict ('bind_key' entry);
    tables without one belong to the 'main' bind.
    """
    return [
        table
        for table in metadata.sorted_tables
        if table.info.get('bind_key', 'main') == bind_key
    ]
def create_all(metadata, engines):
    """Create each bind's tables on its engine, one transaction per bind."""
    for bind_key, engine in engines.items():
        bind_tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as conn, conn.begin():
            metadata.create_all(bind=conn, tables=bind_tables)
def drop_all(metadata, engines):
    """Drop each bind's tables from its engine, one transaction per bind."""
    for bind_key, engine in engines.items():
        bind_tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as conn, conn.begin():
            metadata.drop_all(bind=conn, tables=bind_tables)
def delete_from_all_tables(metadata, engines):
    """Empty every table on every bind.

    Tables are cleared in reverse dependency order (children before
    parents) so foreign keys are never violated.
    """
    for bind_key, engine in engines.items():
        bind_tables = get_tables_for_bind(metadata, bind_key)
        with engine.connect() as conn, conn.begin():
            for table in bind_tables[::-1]:
                conn.execute(table.delete())
def delete_shutdown_file(config):
    """Remove the website shutdown marker file, if it exists."""
    shutdown_path = config.website.shutdown_file_path
    if os.path.exists(shutdown_path):
        os.remove(shutdown_path)
def prepare_database(script):
    """Reset all databases to the standard sample data set and rebuild
    the fingerprint index from it."""
    delete_from_all_tables(metadata, script.db_engines)
    with script.context() as context:
        create_sample_data(context)
        context.db.flush()
        update_fingerprint_index(context.db.connection(), context.index)
        context.db.commit()
@pytest.fixture(scope='session')
def config_path():
    """Path to the shared test configuration file, one level above tests/."""
    tests_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(tests_dir, '..', 'acoustid-test.conf')
@pytest.fixture(scope='session')
def script_global(config_path):
from acoustid.script import Script
script = Script(config_path, tests=True)
recreate_db = str_to_bool(os.environ.get('ACOUSTID_TEST_RECREATE', ''))
if recreate_db:
drop_all(metadata, script.db_engines)
create_all(metadata, script.db_engines)
prepare_database(script)