diff --git a/.babelrc b/.babelrc
index c1d5546da..a29ac9986 100644
--- a/.babelrc
+++ b/.babelrc
@@ -1,3 +1,5 @@
{
- "presets": ["es2015", "stage-2"],
+ "presets": [
+ "@babel/preset-env"
+ ]
}
diff --git a/.dockerignore b/.dockerignore
index 278aeff46..e14dab19c 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -1,7 +1,6 @@
.git
# Docker configuration
-/docker
# Byte-compiled / optimized / DLL files
__pycache__/
diff --git a/Dockerfile b/Dockerfile
index fc060c633..94b0c0640 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM metabrainz/python:3.7
+FROM metabrainz/python:3.8
ARG DEPLOY_ENV
@@ -18,7 +18,7 @@ RUN apt-get update \
# PostgreSQL client
RUN apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys B97B0AFCAA1A47F044F244A07FCC7D46ACCC4CF8
-ENV PG_MAJOR 9.5
+ENV PG_MAJOR 12
RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ jessie-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list
RUN apt-get update \
&& apt-get install -y postgresql-client-$PG_MAJOR \
@@ -27,27 +27,27 @@ RUN apt-get update \
ENV PGPASSWORD "critiquebrainz"
# Node
-RUN curl -sL https://deb.nodesource.com/setup_6.x | bash -
-RUN apt-get install -y nodejs
+RUN curl -sL https://deb.nodesource.com/setup_12.x | bash - \
+ && apt-get install -y nodejs \
+ && rm -rf /var/lib/apt/lists/*
-RUN pip install uWSGI==2.0.13.1
+RUN pip install --no-cache-dir uWSGI==2.0.18
RUN mkdir /code
WORKDIR /code
# Python dependencies
COPY ./requirements.txt /code/
-RUN pip install -r requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
# Node dependencies
-COPY ./package.json /code/
-COPY ./npm-shrinkwrap.json /code/
+COPY ./package.json ./package-lock.json /code/
RUN npm install
COPY . /code/
# Build static files
-RUN ./node_modules/.bin/gulp
+RUN npm run build
# Compile translations
RUN pybabel compile -d critiquebrainz/frontend/translations
diff --git a/Dockerfile.webpack b/Dockerfile.webpack
new file mode 100644
index 000000000..0daafd3c9
--- /dev/null
+++ b/Dockerfile.webpack
@@ -0,0 +1,7 @@
+FROM node:12
+
+RUN mkdir /code
+WORKDIR /code
+
+COPY package.json package-lock.json webpack.config.js /code/
+RUN npm install
diff --git a/README.md b/README.md
index 72bde5d2f..b2b013bad 100644
--- a/README.md
+++ b/README.md
@@ -4,12 +4,11 @@ CritiqueBrainz is a repository for Creative Commons licensed reviews.
## Documentation
-**Documentation is available at https://critiquebrainz.readthedocs.org/.**
+Documentation is available at https://critiquebrainz.readthedocs.io/.
### Building documentation
-In case you want to build documentation yourself follow the following steps
-*(make sure that you use Python 3)*:
+If you want to build the documentation yourself, follow these steps (make sure you use Python 3):
$ cd ./docs
$ pip3 install -r requirements.txt
diff --git a/admin/compile_resources.sh b/admin/compile_resources.sh
deleted file mode 100755
index 7bc415f7d..000000000
--- a/admin/compile_resources.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-
-cd "$(dirname $0)/../"
-node_modules/.bin/gulp $@
diff --git a/admin/schema_changes/16.sql b/admin/schema_changes/16.sql
new file mode 100644
index 000000000..b1f4e5377
--- /dev/null
+++ b/admin/schema_changes/16.sql
@@ -0,0 +1 @@
+ALTER TYPE entity_types ADD VALUE 'artist' AFTER 'place';
diff --git a/admin/schema_changes/17.sql b/admin/schema_changes/17.sql
new file mode 100644
index 000000000..5842ba030
--- /dev/null
+++ b/admin/schema_changes/17.sql
@@ -0,0 +1 @@
+ALTER TYPE entity_types ADD VALUE 'label' AFTER 'artist';
diff --git a/admin/schema_changes/19.sql b/admin/schema_changes/19.sql
new file mode 100644
index 000000000..9d7c8e1c5
--- /dev/null
+++ b/admin/schema_changes/19.sql
@@ -0,0 +1 @@
+ALTER TYPE entity_types ADD VALUE 'work' AFTER 'recording';
diff --git a/admin/sql/create_types.sql b/admin/sql/create_types.sql
index 313a4e971..27a364f85 100644
--- a/admin/sql/create_types.sql
+++ b/admin/sql/create_types.sql
@@ -8,5 +8,8 @@ CREATE TYPE action_types AS ENUM (
CREATE TYPE entity_types AS ENUM (
'release_group',
'event',
- 'place'
+ 'place',
+ 'work',
+ 'artist',
+ 'label'
);
diff --git a/critiquebrainz/data/dump_manager.py b/critiquebrainz/data/dump_manager.py
index 4ea5b1c32..b31e65818 100644
--- a/critiquebrainz/data/dump_manager.py
+++ b/critiquebrainz/data/dump_manager.py
@@ -1,24 +1,25 @@
-from time import gmtime, strftime
-from datetime import datetime
-import subprocess
-import tempfile
-import tarfile
-import shutil
import errno
-import sys
import os
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+from datetime import datetime
+from time import gmtime, strftime
+
+import click
+import sqlalchemy
from flask import current_app, jsonify
from flask.json import JSONEncoder
-import sqlalchemy
-import click
+from psycopg2.sql import SQL, Identifier
+
+from critiquebrainz import db
from critiquebrainz.data.utils import create_path, remove_old_archives, slugify, explode_db_uri, with_request_context
from critiquebrainz.db import license as db_license, review as db_review
-from critiquebrainz import db
-
cli = click.Group()
-
_TABLES = {
"review": (
"id",
@@ -66,6 +67,12 @@
"rating",
"count",
),
+ "vote": (
+ "user_id",
+ "revision_id",
+ "vote",
+ "rated_at",
+ ),
}
@@ -104,7 +111,7 @@ def full_db(location, rotate=False):
# More info about it is available at http://www.postgresql.org/docs/9.3/static/app-pgdump.html
dump_file = os.path.join(location, FILE_PREFIX + strftime("%Y%m%d-%H%M%S", gmtime()))
print('pg_dump -h "%s" -p "%s" -U "%s" -d "%s" -Ft > "%s.tar"' %
- (db_hostname, db_port, db_username, db_name, dump_file),)
+ (db_hostname, db_port, db_username, db_name, dump_file), )
result = subprocess.call(
'pg_dump -h "%s" -p "%s" -U "%s" -d "%s" -Ft > "%s.tar"' %
(db_hostname, db_port, db_username, db_name, dump_file),
@@ -159,7 +166,8 @@ def json(location, rotate=False):
entity = str(entity)
# Creating directory structure and dumping reviews
dir_part = os.path.join(entity[0:1], entity[0:2])
- reviews = db_review.get_reviews_list(connection, entity_id=entity, license_id=license["id"], limit=None)[0]
+ reviews = db_review.get_reviews_list(connection, entity_id=entity,
+ license_id=license["id"], limit=None)[0]
if reviews:
rg_dir = '%s/%s' % (license_dir, dir_part)
create_path(rg_dir)
@@ -174,7 +182,8 @@ def json(location, rotate=False):
tar.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), "licenses", safe_name + ".txt"),
arcname='COPYING')
- print(" + %s/critiquebrainz-%s-%s-json.tar.bz2" % (location, datetime.today().strftime('%Y%m%d'), safe_name))
+ print(" + %s/critiquebrainz-%s-%s-json.tar.bz2" % (
+ location, datetime.today().strftime('%Y%m%d'), safe_name))
shutil.rmtree(temp_dir) # Cleanup
@@ -307,7 +316,8 @@ def create_base_archive(connection, *, location, meta_files_dir=None):
# Including additional information about this archive
# Copying the most restrictive license there (CC BY-NC-SA 3.0)
- tar.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), "licenses", "cc-by-nc-sa-30.txt"), arcname='COPYING')
+ tar.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), "licenses", "cc-by-nc-sa-30.txt"),
+ arcname='COPYING')
# Copy meta files
if not meta_files_dir:
prepare_meta_files(temp_dir)
@@ -364,6 +374,23 @@ def create_reviews_archive(connection, *, location, meta_files_dir=None, license
license_where_clause=license_where_clause,
)
+ VOTE_SQL = """(
+ SELECT {columns}
+ FROM vote
+ JOIN ( SELECT revision.id
+ FROM revision
+ JOIN review
+ ON review.id = revision.review_id
+ WHERE review.is_hidden = false
+ AND review.is_draft = false
+ {license_where_clause}
+ ) AS rev
+ ON vote.revision_id = rev.id
+ )""".format(
+ columns=', '.join(['vote.' + column for column in _TABLES['vote']]),
+ license_where_clause=license_where_clause,
+ )
+
with tarfile.open(os.path.join(location, archive_name), "w:bz2") as tar:
# Dumping tables
temp_dir = tempfile.mkdtemp()
@@ -380,6 +407,10 @@ def create_reviews_archive(connection, *, location, meta_files_dir=None, license
with open(os.path.join(reviews_tables_dir, 'avg_rating'), 'w') as f:
cursor.copy_to(f, "(SELECT {columns} FROM avg_rating)".format(columns=", ".join(_TABLES["avg_rating"])))
+
+ with open(os.path.join(reviews_tables_dir, 'vote'), 'w') as f:
+ cursor.copy_to(f, VOTE_SQL)
+
except Exception as e:
print("Error {} occurred while copying tables during the creation of the reviews archive!".format(e))
raise
@@ -389,7 +420,8 @@ def create_reviews_archive(connection, *, location, meta_files_dir=None, license
tar.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), "licenses", "cc-by-nc-sa-30.txt"),
arcname='COPYING')
else:
- tar.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), "licenses", safe_name + ".txt"), arcname='COPYING')
+ tar.add(os.path.join(os.path.dirname(os.path.realpath(__file__)), "licenses", safe_name + ".txt"),
+ arcname='COPYING')
if not meta_files_dir:
prepare_meta_files(temp_dir)
@@ -442,13 +474,16 @@ def importer(archive):
import_data(os.path.join(temp_dir, 'cbdump', 'review'), 'review')
import_data(os.path.join(temp_dir, 'cbdump', 'revision'), 'revision')
import_data(os.path.join(temp_dir, 'cbdump', 'avg_rating'), 'avg_rating')
+ import_data(os.path.join(temp_dir, 'cbdump', 'vote'), 'vote')
+
+ # Reset sequence values after importing dump
+ reset_sequence(['revision'])
shutil.rmtree(temp_dir) # Cleanup
print("Done!")
def import_data(file_name, table_name, columns=None):
-
connection = db.engine.raw_connection()
try:
cursor = connection.cursor()
@@ -475,6 +510,19 @@ def import_data(file_name, table_name, columns=None):
connection.close()
+def reset_sequence(table_names):
+ connection = db.engine.raw_connection()
+ try:
+ cursor = connection.cursor()
+ for table_name in table_names:
+ cursor.execute(
+ SQL("SELECT setval(pg_get_serial_sequence(%s, 'id'), coalesce(max(id),0) + 1, false) FROM {};")
+ .format(Identifier(table_name)), (table_name,))
+ connection.commit()
+ finally:
+ connection.close()
+
+
class DumpJSONEncoder(JSONEncoder):
"""Custom JSON encoder for database dumps."""
diff --git a/critiquebrainz/data/fixtures.py b/critiquebrainz/data/fixtures.py
index 09a55c3a9..181ad314d 100644
--- a/critiquebrainz/data/fixtures.py
+++ b/critiquebrainz/data/fixtures.py
@@ -37,4 +37,4 @@ class LicenseData:
# Include all objects into this tuple.
-all_data = (LicenseData, )
+all_data = (LicenseData,)
diff --git a/critiquebrainz/data/mixins.py b/critiquebrainz/data/mixins.py
index 5afc36a80..42debb8d0 100644
--- a/critiquebrainz/data/mixins.py
+++ b/critiquebrainz/data/mixins.py
@@ -4,6 +4,7 @@
class AdminMixin(UserMixin):
"""Allows a method to check if the current user is admin."""
+
def is_admin(self):
return self.musicbrainz_username in current_app.config['ADMINS']
diff --git a/critiquebrainz/data/testing.py b/critiquebrainz/data/testing.py
index 4daaef5cb..2eca88fa1 100644
--- a/critiquebrainz/data/testing.py
+++ b/critiquebrainz/data/testing.py
@@ -1,7 +1,9 @@
import os
+
from flask_testing import TestCase
-from critiquebrainz.frontend import create_app
+
from critiquebrainz.data.utils import create_all, drop_tables, drop_types
+from critiquebrainz.frontend import create_app
class DataTestCase(TestCase):
diff --git a/critiquebrainz/data/utils.py b/critiquebrainz/data/utils.py
index dce9b1b5b..a013bd8f7 100644
--- a/critiquebrainz/data/utils.py
+++ b/critiquebrainz/data/utils.py
@@ -1,11 +1,12 @@
-import urllib.parse
-import unicodedata
-import shutil
import errno
-import sys
import os
import re
+import shutil
+import sys
+import unicodedata
+import urllib.parse
from functools import wraps
+
from critiquebrainz import db
from critiquebrainz import frontend
@@ -102,11 +103,13 @@ def with_request_context(f):
def decorated(*args, **kwargs):
with frontend.create_app().test_request_context():
return f(*args, **kwargs)
+
return decorated
def with_test_request_context(f):
"""Decorator for providing request context for application using test_config.py."""
+
@wraps(f)
def decorated(*args, **kwargs):
with frontend.create_app(
@@ -114,4 +117,5 @@ def decorated(*args, **kwargs):
os.path.dirname(os.path.realpath(__file__)),
'..', 'test_config.py')).test_request_context():
return f(*args, **kwargs)
+
return decorated
diff --git a/critiquebrainz/data/utils_test.py b/critiquebrainz/data/utils_test.py
index 1e1899d67..76b134666 100644
--- a/critiquebrainz/data/utils_test.py
+++ b/critiquebrainz/data/utils_test.py
@@ -1,5 +1,5 @@
-from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.data import utils
+from critiquebrainz.data.testing import DataTestCase
class DataUtilsTestCase(DataTestCase):
diff --git a/critiquebrainz/db/avg_rating.py b/critiquebrainz/db/avg_rating.py
index 96436fb69..78576fbdf 100644
--- a/critiquebrainz/db/avg_rating.py
+++ b/critiquebrainz/db/avg_rating.py
@@ -1,4 +1,5 @@
import sqlalchemy
+
from critiquebrainz import db
from critiquebrainz.db import exceptions as db_exceptions
diff --git a/critiquebrainz/db/avg_rating_test.py b/critiquebrainz/db/avg_rating_test.py
index b9131fad7..84c1b5260 100644
--- a/critiquebrainz/db/avg_rating_test.py
+++ b/critiquebrainz/db/avg_rating_test.py
@@ -1,10 +1,10 @@
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.users as db_users
-from critiquebrainz.db.user import User
-import critiquebrainz.db.review as db_review
import critiquebrainz.db.avg_rating as db_avg_rating
import critiquebrainz.db.exceptions as db_exceptions
import critiquebrainz.db.license as db_license
+import critiquebrainz.db.review as db_review
+import critiquebrainz.db.users as db_users
+from critiquebrainz.data.testing import DataTestCase
+from critiquebrainz.db.user import User
class AvgRatingTestCase(DataTestCase):
diff --git a/critiquebrainz/db/comment.py b/critiquebrainz/db/comment.py
index 0893f4219..f39ca562f 100644
--- a/critiquebrainz/db/comment.py
+++ b/critiquebrainz/db/comment.py
@@ -17,10 +17,10 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sqlalchemy
+
import critiquebrainz.db as db
import critiquebrainz.db.comment_revision as db_comment_revision
import critiquebrainz.db.exceptions as db_exceptions
-
from critiquebrainz.db.user import User
@@ -45,7 +45,7 @@ def create(*, user_id, text, review_id, is_draft=False):
'user_id': user_id,
'review_id': review_id,
'is_draft': is_draft,
- })
+ })
comment_id = result.fetchone()['id']
db_comment_revision.create(comment_id, text)
return get_by_id(comment_id)
@@ -98,7 +98,7 @@ def get_by_id(comment_id):
LIMIT 1
"""), {
'comment_id': comment_id,
- })
+ })
comment = result.fetchone()
if not comment:
@@ -232,7 +232,7 @@ def delete(comment_id):
WHERE id = :comment_id
"""), {
'comment_id': comment_id,
- })
+ })
def update(comment_id, *, text=None, is_draft=None, is_hidden=None):
diff --git a/critiquebrainz/db/comment_revision.py b/critiquebrainz/db/comment_revision.py
index 67db5e6fb..23b5e2597 100644
--- a/critiquebrainz/db/comment_revision.py
+++ b/critiquebrainz/db/comment_revision.py
@@ -17,6 +17,7 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import sqlalchemy
+
import critiquebrainz.db as db
@@ -38,6 +39,6 @@ def create(comment_id, text):
"""), {
'comment_id': comment_id,
'text': text,
- })
+ })
return result.fetchone()['id']
diff --git a/critiquebrainz/db/dump_manager_test.py b/critiquebrainz/db/dump_manager_test.py
index 49836a24d..8206573e5 100644
--- a/critiquebrainz/db/dump_manager_test.py
+++ b/critiquebrainz/db/dump_manager_test.py
@@ -1,12 +1,15 @@
import os
import tempfile
from datetime import datetime
+
from click.testing import CliRunner
-from critiquebrainz.data.testing import DataTestCase
-from critiquebrainz.data import utils
+
import critiquebrainz.db.license as db_license
-import critiquebrainz.db.users as db_users
import critiquebrainz.db.review as db_review
+import critiquebrainz.db.users as db_users
+import critiquebrainz.db.vote as db_vote
+from critiquebrainz.data import utils
+from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.db.user import User
utils.with_request_context = utils.with_test_request_context # noqa
@@ -60,11 +63,16 @@ def test_public(self):
self.assertIn(f'cbdump-reviews-{self.license["id"]}.tar.bz2', archives)
def test_importer(self):
- user = User(db_users.get_or_create(1, "Tester", new_user_data={
- "display_name": "test user",
+ user_1 = User(db_users.get_or_create(1, "Tester_1", new_user_data={
+ "display_name": "test user_1",
}))
+ user_2 = User(db_users.get_or_create(2, "Tester_2", new_user_data={
+ "display_name": "test user_2",
+ }))
+
+ # user_1 adds a review
review = db_review.create(
- user_id=user.id,
+ user_id=user_1.id,
entity_id="e7aad618-fa86-3983-9e77-405e21796eca",
entity_type="release_group",
text="Testing",
@@ -72,18 +80,23 @@ def test_importer(self):
is_draft=False,
license_id=self.license["id"],
)
+ # user_2 votes on review by user_1
+ db_vote.submit(user_2.id, review["last_revision"]["id"], True)
# Make dumps and delete entities
self.runner.invoke(dump_manager.public, ['--location', self.tempdir])
archives = get_archives(self.tempdir)
db_review.delete(review['id'])
- db_users.delete(user.id)
+ db_users.delete(user_1.id)
+ db_users.delete(user_2.id)
self.assertEqual(db_users.total_count(), 0)
self.assertEqual(db_review.get_count(), 0)
+ self.assertEqual(db_vote.get_count(), 0)
# Import dumps - cbdump.tar.bz2 and cbdump-reviews-all.tar.bz2 and check if data imported properly
self.runner.invoke(dump_manager.importer, [archives['cbdump.tar.bz2']])
- self.assertEqual(db_users.total_count(), 1)
+ self.assertEqual(db_users.total_count(), 2)
self.runner.invoke(dump_manager.importer, [archives['cbdump-reviews-all.tar.bz2']])
self.assertEqual(db_review.get_count(), 1)
+ self.assertEqual(db_vote.get_count(), 1)
diff --git a/critiquebrainz/db/license.py b/critiquebrainz/db/license.py
index 00fb30215..91063f2f3 100644
--- a/critiquebrainz/db/license.py
+++ b/critiquebrainz/db/license.py
@@ -1,4 +1,5 @@
import sqlalchemy
+
from critiquebrainz import db
diff --git a/critiquebrainz/db/license_test.py b/critiquebrainz/db/license_test.py
index 97de5ecf5..e4088e3fb 100644
--- a/critiquebrainz/db/license_test.py
+++ b/critiquebrainz/db/license_test.py
@@ -1,5 +1,5 @@
-from critiquebrainz.data.testing import DataTestCase
import critiquebrainz.db.license as db_license
+from critiquebrainz.data.testing import DataTestCase
class LicenseTestCase(DataTestCase):
diff --git a/critiquebrainz/db/moderation_log.py b/critiquebrainz/db/moderation_log.py
index 80465a1b5..03c6884f4 100644
--- a/critiquebrainz/db/moderation_log.py
+++ b/critiquebrainz/db/moderation_log.py
@@ -4,7 +4,9 @@
"""
from datetime import datetime
from enum import Enum
+
import sqlalchemy
+
from critiquebrainz import db
@@ -16,6 +18,10 @@ class AdminActions(Enum):
ACTION_BLOCK_USER = "block_user"
ACTION_UNBLOCK_USER = "unblock_user"
+ @classmethod
+ def get_all_actions(cls):
+ return list(cls)
+
def create(*, admin_id, review_id=None, user_id=None,
action, reason):
@@ -30,7 +36,7 @@ def create(*, admin_id, review_id=None, user_id=None,
"""
if not review_id and not user_id:
raise ValueError("No review ID or user ID specified.")
- if action not in AdminActions:
+ if action not in AdminActions.get_all_actions():
raise ValueError("Please specify a valid action.")
with db.engine.connect() as connection:
connection.execute(sqlalchemy.text("""
diff --git a/critiquebrainz/db/moderation_log_test.py b/critiquebrainz/db/moderation_log_test.py
index 9f007e3af..6a0417e67 100644
--- a/critiquebrainz/db/moderation_log_test.py
+++ b/critiquebrainz/db/moderation_log_test.py
@@ -1,9 +1,9 @@
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.moderation_log as db_moderation_log
-from critiquebrainz.db.moderation_log import AdminActions
-import critiquebrainz.db.users as db_users
import critiquebrainz.db.license as db_license
+import critiquebrainz.db.moderation_log as db_moderation_log
import critiquebrainz.db.review as db_review
+import critiquebrainz.db.users as db_users
+from critiquebrainz.data.testing import DataTestCase
+from critiquebrainz.db.moderation_log import AdminActions
from critiquebrainz.db.user import User
diff --git a/critiquebrainz/db/oauth_client.py b/critiquebrainz/db/oauth_client.py
index 36c542cf6..35db176a6 100644
--- a/critiquebrainz/db/oauth_client.py
+++ b/critiquebrainz/db/oauth_client.py
@@ -1,9 +1,9 @@
import sqlalchemy
+
from critiquebrainz import db
from critiquebrainz.db import exceptions as db_exceptions
from critiquebrainz.utils import generate_string
-
CLIENT_ID_LENGTH = 20
CLIENT_SECRET_LENGTH = 40
diff --git a/critiquebrainz/db/oauth_client_test.py b/critiquebrainz/db/oauth_client_test.py
index 3129e6d65..a7da77038 100644
--- a/critiquebrainz/db/oauth_client_test.py
+++ b/critiquebrainz/db/oauth_client_test.py
@@ -1,6 +1,6 @@
-from critiquebrainz.data.testing import DataTestCase
import critiquebrainz.db.oauth_client as db_oauth_client
import critiquebrainz.db.users as db_users
+from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.db.exceptions import NoDataFoundException
from critiquebrainz.db.user import User
diff --git a/critiquebrainz/db/oauth_grant.py b/critiquebrainz/db/oauth_grant.py
index a07d438d4..f48e96a02 100644
--- a/critiquebrainz/db/oauth_grant.py
+++ b/critiquebrainz/db/oauth_grant.py
@@ -1,4 +1,5 @@
import sqlalchemy
+
from critiquebrainz import db
from critiquebrainz.db import exceptions as db_exceptions
diff --git a/critiquebrainz/db/oauth_grant_test.py b/critiquebrainz/db/oauth_grant_test.py
index 66e06b1b3..4b55cf16d 100644
--- a/critiquebrainz/db/oauth_grant_test.py
+++ b/critiquebrainz/db/oauth_grant_test.py
@@ -1,9 +1,10 @@
from datetime import datetime, timedelta
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.oauth_grant as db_oauth_grant
+
+import critiquebrainz.db.exceptions as db_exceptions
import critiquebrainz.db.oauth_client as db_oauth_client
+import critiquebrainz.db.oauth_grant as db_oauth_grant
import critiquebrainz.db.users as db_users
-import critiquebrainz.db.exceptions as db_exceptions
+from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.db.user import User
@@ -45,7 +46,8 @@ def test_list(self):
scopes=None,
)
self.assertEqual(len(db_oauth_grant.list_grants(client_id=self.oauth_client["client_id"])), 1)
- self.assertEqual(len(db_oauth_grant.list_grants(client_id=self.oauth_client["client_id"], code=oauth_grant["code"])), 1)
+ self.assertEqual(
+ len(db_oauth_grant.list_grants(client_id=self.oauth_client["client_id"], code=oauth_grant["code"])), 1)
def test_delete(self):
oauth_grant = db_oauth_grant.create(
diff --git a/critiquebrainz/db/oauth_token.py b/critiquebrainz/db/oauth_token.py
index fb497d331..2398074d6 100644
--- a/critiquebrainz/db/oauth_token.py
+++ b/critiquebrainz/db/oauth_token.py
@@ -1,4 +1,5 @@
import sqlalchemy
+
from critiquebrainz import db
from critiquebrainz.db import exceptions as db_exceptions
diff --git a/critiquebrainz/db/oauth_token_test.py b/critiquebrainz/db/oauth_token_test.py
index b6c42f4d6..eea4728c0 100644
--- a/critiquebrainz/db/oauth_token_test.py
+++ b/critiquebrainz/db/oauth_token_test.py
@@ -1,9 +1,10 @@
from datetime import datetime, timedelta
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.oauth_token as db_oauth_token
+
+import critiquebrainz.db.exceptions as db_exceptions
import critiquebrainz.db.oauth_client as db_oauth_client
+import critiquebrainz.db.oauth_token as db_oauth_token
import critiquebrainz.db.users as db_users
-import critiquebrainz.db.exceptions as db_exceptions
+from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.db.user import User
diff --git a/critiquebrainz/db/review.py b/critiquebrainz/db/review.py
index afc05f4dc..d04a7a7b0 100644
--- a/critiquebrainz/db/review.py
+++ b/critiquebrainz/db/review.py
@@ -1,9 +1,11 @@
-from random import shuffle
-from datetime import datetime, timedelta
import uuid
-import sqlalchemy
+from datetime import datetime, timedelta
+from random import shuffle
+
import pycountry
+import sqlalchemy
from brainzutils import cache
+
from critiquebrainz import db
from critiquebrainz.db import (exceptions as db_exceptions,
revision as db_revision,
@@ -19,9 +21,11 @@
"event",
"place",
"release_group",
+ "work",
+ "artist",
+ "label",
]
-
supported_languages = []
for lang in list(pycountry.languages):
if 'iso639_1_code' in dir(lang):
@@ -233,12 +237,12 @@ def update(review_id, *, drafted, text=None, rating=None, license_id=None, langu
WHERE id = :review_id
""".format(setstr=setstr))
- if setstr:
- updated_info["review_id"] = review_id
- with db.engine.connect() as connection:
+ with db.engine.connect() as connection:
+ if setstr:
+ updated_info["review_id"] = review_id
connection.execute(query, updated_info)
-
- db_revision.create(review_id, text, rating)
+ db_revision.create(connection, review_id, text, rating)
+ db_revision.update_rating(review_id)
cache.invalidate_namespace(REVIEW_CACHE_NAMESPACE)
@@ -317,9 +321,11 @@ def create(*, entity_id, entity_type, user_id, is_draft, text=None, rating=None,
"published_on": published_on,
})
review_id = result.fetchone()[0]
- # TODO(roman): It would be better to create review and revision in one transaction
- db_revision.create(review_id, text, rating)
- cache.invalidate_namespace(REVIEW_CACHE_NAMESPACE)
+ db_revision.create(connection, review_id, text, rating)
+ if rating:
+ db_revision.update_rating(review_id)
+
+ cache.invalidate_namespace(REVIEW_CACHE_NAMESPACE)
return get_by_id(review_id)
@@ -534,8 +540,9 @@ def get_popular(limit=None):
cache_key = cache.gen_key("popular_reviews", limit)
reviews = cache.get(cache_key, REVIEW_CACHE_NAMESPACE)
defined_limit = 4 * limit if limit else None
+ reset_cache = any([check_review_deleted(review["id"]) for review in reviews]) if reviews else False
- if not reviews:
+ if not reviews or reset_cache:
with db.engine.connect() as connection:
results = connection.execute(sqlalchemy.text("""
SELECT review.id,
@@ -635,6 +642,22 @@ def delete(review_id):
db_avg_rating.update(review["entity_id"], review["entity_type"])
+def check_review_deleted(review_id) -> bool:
+ """Check if a review exists in CB.
+
+ Args:
+ review_id: ID of the review to be checked.
+ """
+ with db.engine.connect() as connection:
+ result = connection.execute(sqlalchemy.text("""
+ SELECT NOT EXISTS (SELECT true FROM review WHERE id = :review_id) AS exists
+ """), {
+ "review_id": review_id,
+ })
+
+ return dict(result.fetchone())["exists"]
+
+
def get_distinct_entities(connection):
"""
helper function for distinct_entities() that extends support for execution within a transaction by directly receiving the
diff --git a/critiquebrainz/db/review_test.py b/critiquebrainz/db/review_test.py
index c070c9a3d..7b339d1fc 100644
--- a/critiquebrainz/db/review_test.py
+++ b/critiquebrainz/db/review_test.py
@@ -1,12 +1,14 @@
from unittest.mock import MagicMock
+
from brainzutils import cache
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.users as db_users
-from critiquebrainz.db.user import User
-import critiquebrainz.db.review as db_review
-import critiquebrainz.db.revision as db_revision
+
import critiquebrainz.db.exceptions as db_exceptions
import critiquebrainz.db.license as db_license
+import critiquebrainz.db.review as db_review
+import critiquebrainz.db.revision as db_revision
+import critiquebrainz.db.users as db_users
+from critiquebrainz.data.testing import DataTestCase
+from critiquebrainz.db.user import User
class ReviewTestCase(DataTestCase):
diff --git a/critiquebrainz/db/revision.py b/critiquebrainz/db/revision.py
index 772e51b0c..e240099c9 100644
--- a/critiquebrainz/db/revision.py
+++ b/critiquebrainz/db/revision.py
@@ -1,10 +1,12 @@
from datetime import datetime
+
import sqlalchemy
+
from critiquebrainz import db
-from critiquebrainz.db import review as db_review
+from critiquebrainz.db import VALID_RATING_VALUES, RATING_SCALE_1_5, RATING_SCALE_0_100
from critiquebrainz.db import avg_rating as db_avg_rating
from critiquebrainz.db import exceptions as db_exceptions
-from critiquebrainz.db import VALID_RATING_VALUES, RATING_SCALE_1_5, RATING_SCALE_0_100
+from critiquebrainz.db import review as db_review
def get(review_id, limit=1, offset=0):
@@ -157,10 +159,11 @@ def get_revision_number(review_id, revision_id):
return rev_num
-def create(review_id, text=None, rating=None):
+def create(connection, review_id, text=None, rating=None):
"""Creates a new revision for the given review.
Args:
+        connection: database connection used to insert the new revision
review_id (uuid): ID of the review.
text (str): Updated/New text part of the review.
rating (int): Updated/New rating part of the review
@@ -172,17 +175,19 @@ def create(review_id, text=None, rating=None):
# Convert ratings to values on a scale 0-100
rating = RATING_SCALE_0_100.get(rating)
- with db.engine.connect() as connection:
- connection.execute(sqlalchemy.text("""
- INSERT INTO revision(review_id, timestamp, text, rating)
- VALUES (:review_id, :timestamp, :text, :rating)
- """), {
- "review_id": review_id,
- "timestamp": datetime.now(),
- "text": text,
- "rating": rating,
- })
+ query = sqlalchemy.text("""INSERT INTO revision(review_id, timestamp, text, rating)
+ VALUES (:review_id, :timestamp, :text, :rating)""")
+ params = {
+ "review_id": review_id,
+ "timestamp": datetime.now(),
+ "text": text,
+ "rating": rating,
+ }
+
+ connection.execute(query, params)
+
+def update_rating(review_id):
# Update average rating if rating part of the review has changed
review = db_review.get_by_id(review_id)
rev_num = get_revision_number(review["id"], review["last_revision"]["id"])
@@ -190,7 +195,7 @@ def create(review_id, text=None, rating=None):
revisions = get(review["id"], limit=2, offset=0)
if revisions[0]["rating"] != revisions[1]["rating"]:
db_avg_rating.update(review["entity_id"], review["entity_type"])
- elif rating is not None:
+ else:
db_avg_rating.update(review["entity_id"], review["entity_type"])
diff --git a/critiquebrainz/db/revision_test.py b/critiquebrainz/db/revision_test.py
index 8be037e92..68da34aa8 100644
--- a/critiquebrainz/db/revision_test.py
+++ b/critiquebrainz/db/revision_test.py
@@ -1,10 +1,11 @@
from datetime import datetime
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.review as db_review
-from critiquebrainz.db.user import User
-from critiquebrainz.db import revision, vote
+
import critiquebrainz.db.license as db_license
+import critiquebrainz.db.review as db_review
import critiquebrainz.db.users as db_users
+from critiquebrainz.data.testing import DataTestCase
+from critiquebrainz.db import revision, vote
+from critiquebrainz.db.user import User
class RevisionTestCase(DataTestCase):
diff --git a/critiquebrainz/db/spam_report.py b/critiquebrainz/db/spam_report.py
index 983d50a4f..28863688f 100644
--- a/critiquebrainz/db/spam_report.py
+++ b/critiquebrainz/db/spam_report.py
@@ -1,5 +1,7 @@
from datetime import datetime
+
import sqlalchemy
+
from critiquebrainz import db
from critiquebrainz.db import revision as db_revision
@@ -181,7 +183,6 @@ def list_reports(**kwargs):
if spam_reports:
spam_reports = [dict(spam_report) for spam_report in spam_reports]
for spam_report in spam_reports:
-
spam_report["review"] = {
"user": {
"id": spam_report.pop("review_user_id"),
diff --git a/critiquebrainz/db/spam_report_test.py b/critiquebrainz/db/spam_report_test.py
index 483eb6cc0..4a05b08a3 100644
--- a/critiquebrainz/db/spam_report_test.py
+++ b/critiquebrainz/db/spam_report_test.py
@@ -1,8 +1,8 @@
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.spam_report as db_spam_report
-import critiquebrainz.db.review as db_review
import critiquebrainz.db.license as db_license
+import critiquebrainz.db.review as db_review
+import critiquebrainz.db.spam_report as db_spam_report
import critiquebrainz.db.users as db_users
+from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.db.user import User
@@ -63,7 +63,8 @@ def test_list_reports(self):
text="Updated Review",
)
self.review = db_review.get_by_id(self.review["id"])
- db_spam_report.create(self.review["last_revision"]["id"], self.user1.id, "This is again a report on the updated review")
+ db_spam_report.create(self.review["last_revision"]["id"], self.user1.id,
+ "This is again a report on the updated review")
# two reports on the old revision and one on the new revision.
reports, count = db_spam_report.list_reports(review_id=self.review["id"]) # pylint: disable=unused-variable
self.assertEqual(count, 3)
diff --git a/critiquebrainz/db/statistics.py b/critiquebrainz/db/statistics.py
new file mode 100644
index 000000000..02767d5ef
--- /dev/null
+++ b/critiquebrainz/db/statistics.py
@@ -0,0 +1,258 @@
+# critiquebrainz - Repository for Creative Commons licensed reviews
+#
+# Copyright (C) 2019 Bimalkant Lauhny.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+from datetime import date, timedelta
+
+import sqlalchemy
+from brainzutils import cache
+
+import critiquebrainz.db.exceptions as db_exceptions
+from critiquebrainz import db
+
+_CACHE_NAMESPACE = "cb_statistics"
+_DEFAULT_CACHE_EXPIRATION = 1 * 60 * 60 # seconds (1 hour)
+
+
+def merge_rows(list_1, list_2, key):
+ """ Merges two lists of dicts based on key in dicts
+
+ Args:
+ list_1(list[dict(),]): A list of dictionaries
+ list_2(list[dict(),]): A list of dictionaries
+ key(string): key using which lists would be merged
+
+ Returns:
+ List of dictionaries updated after merging two lists
+ """
+
+ merged = dict()
+ for row in list_1 + list_2:
+ if row[key] in merged:
+ merged[row[key]].update(row)
+ else:
+ merged[row[key]] = row
+ return list(merged.values())
+
+
+def get_users_with_review_count(from_date=date(1970, 1, 1), to_date=date.today() + timedelta(1)):
+ """ Gets list of users with number of reviews they've submitted
+
+ Args:
+ from_date(datetime): Date from which contributions by users are to be considered.
+        to_date(datetime): Date up to which contributions by users are to be considered. NOTE(review): this default is evaluated once at import time, not per call — confirm this is intended.
+
+ Returns:
+ List of dictionaries where each dictionary has the following structure:
+ {
+ "id": (uuid),
+ "display_name": (str),
+ "review_count": (int),
+ }
+ """
+ with db.engine.connect() as connection:
+ result = connection.execute(sqlalchemy.text("""
+ SELECT id,
+ display_name,
+ COALESCE(rc, 0) AS review_count
+ FROM "user"
+ LEFT JOIN (SELECT user_id,
+ count(*) AS rc
+ FROM review
+ WHERE published_on >= :from_date AND published_on <= :to_date
+ AND review.is_draft = 'f'
+ AND review.is_hidden = 'f'
+ GROUP BY user_id) AS num_review
+ ON "user".id = num_review.user_id
+ """), {
+ "from_date": from_date,
+ "to_date": to_date,
+ })
+
+ reviewers = result.fetchall()
+ if not reviewers:
+ raise db_exceptions.NoDataFoundException("Can't get users with review count!")
+ reviewers = [dict(reviewer) for reviewer in reviewers]
+ return reviewers
+
+
+def get_users_with_vote_count(from_date=date(1970, 1, 1), to_date=date.today() + timedelta(1)):
+ """ Gets list of users with number of votes they've submitted
+
+ Args:
+ from_date(datetime): Date from which contributions by users are to be considered.
+        to_date(datetime): Date up to which contributions by users are to be considered. NOTE(review): this default is evaluated once at import time, not per call — confirm this is intended.
+
+ Returns:
+ List of dictionaries where each dictionary has the following structure:
+ {
+ "id": (uuid),
+ "display_name": (str),
+ "vote_count": (int),
+ }
+ """
+ with db.engine.connect() as connection:
+ result = connection.execute(sqlalchemy.text("""
+ SELECT id,
+ display_name,
+ COALESCE(vc, 0) AS vote_count
+ FROM "user"
+ LEFT JOIN (SELECT user_id,
+ count(*) AS vc
+ FROM vote
+ WHERE rated_at >= :from_date AND rated_at <= :to_date
+ GROUP BY user_id) AS num_votes
+ ON "user".id = num_votes.user_id
+ """), {
+ "from_date": from_date,
+ "to_date": to_date,
+ })
+
+ voters = result.fetchall()
+ if not voters:
+ raise db_exceptions.NoDataFoundException("Can't get users with vote count!")
+ voters = [dict(voter) for voter in voters]
+ return voters
+
+
+def get_users_with_comment_count(from_date=date(1970, 1, 1), to_date=date.today() + timedelta(1)):
+ """ Gets list of users with number of comments they've submitted
+
+ Args:
+ from_date(datetime): Date from which contributions by users are to be considered.
+        to_date(datetime): Date up to which contributions by users are to be considered. NOTE(review): this default is evaluated once at import time, not per call — confirm this is intended.
+
+ Returns:
+ List of dictionaries where each dictionary has the following structure:
+ {
+ "id": (uuid),
+ "display_name": (str),
+ "comment_count": (int),
+ }
+ """
+ with db.engine.connect() as connection:
+ result = connection.execute(sqlalchemy.text("""
+ SELECT id,
+ display_name,
+ COALESCE(cc, 0) AS comment_count
+ FROM "user"
+ LEFT JOIN (SELECT user_id,
+ count(*) AS cc
+ FROM comment
+ LEFT JOIN (SELECT comment_id,
+ min(timestamp) AS commented_at
+ FROM comment_revision
+ GROUP BY comment_id) AS comment_create
+ ON comment.id = comment_create.comment_id
+ WHERE commented_at >= :from_date AND commented_at <= :to_date
+ GROUP BY user_id) AS num_comment
+ ON "user".id = num_comment.user_id
+ """), {
+ "from_date": from_date,
+ "to_date": to_date,
+ })
+
+ commenters = result.fetchall()
+ if not commenters:
+ raise db_exceptions.NoDataFoundException("Can't get users with comment count!")
+ commenters = [dict(commenter) for commenter in commenters]
+ return commenters
+
+
+def get_top_users(from_date=date(1970, 1, 1), to_date=date.today() + timedelta(1), review_weight=1,
+ comment_weight=1, vote_weight=1, limit=10):
+ """ Gets list of top contributors based on number of reviews, votes and comments
+ along with their final scores.
+ score = (reviews * review_weight + comments * comment_weight + votes * vote_weight)
+    Results are sorted in descending order by score and max number of results are
+ defined by 'limit'.
+
+ Args:
+ from_date(datetime): Date from which contributions by users are to be considered.
+        to_date(datetime): Date up to which contributions by users are to be considered. NOTE(review): this default is evaluated once at import time, not per call — confirm this is intended.
+ review_weight(int): Weight for each review of a user to add to their final score
+ comment_weight(int): Weight for each comment of a user to add to their final score
+        vote_weight(int): Weight for each vote of a user to add to their final score
+        limit(int): Maximum number of top users to return (default 10)
+ Returns:
+ List of dictionaries where each dictionary has the following structure:
+ {
+ "id": (uuid),
+ "display_name": (str),
+ "review_count": (int),
+ "comment_count": (int),
+ "vote_count": (int),
+ "score": (int),
+ }
+ """
+
+ reviewers = get_users_with_review_count(from_date=from_date, to_date=to_date)
+ commenters = get_users_with_comment_count(from_date=from_date, to_date=to_date)
+ voters = get_users_with_vote_count(from_date=from_date, to_date=to_date)
+
+ # merge based on user_id
+ top_scorers = merge_rows(merge_rows(reviewers, commenters, "id"), voters, "id")
+
+ # add 'score' for each user
+ for user in top_scorers:
+ user["id"] = str(user["id"])
+ user["score"] = user["review_count"] * review_weight + user["comment_count"] * comment_weight + user[
+ "vote_count"] * vote_weight
+
+ # sort top_users by 'score' in descending order and keep only top 'limit' users
+ top_scorers = sorted(top_scorers, key=lambda row: row["score"], reverse=True)[:limit]
+ if top_scorers[0]["score"] == 0:
+ top_scorers = []
+ return top_scorers
+
+
+def get_top_users_overall():
+ """ Gets top contributors since the beginning
+
+    Returns:
+        None if no data could be fetched, otherwise:
+ List of dictionaries where each dictionary has the following structure:
+ {
+ "id": (str),
+ "display_name": (str),
+ "review_count": (int),
+ "comment_count": (int),
+ "vote_count": (int),
+ "score": (int),
+ }
+ """
+ key = cache.gen_key("top_users_overall", _CACHE_NAMESPACE)
+ top_users = cache.get(key, _CACHE_NAMESPACE)
+
+ # if could not fetch results from cache, or fetched results have to be updated
+ if not top_users:
+
+ try:
+ results = get_top_users(
+ review_weight=5,
+ comment_weight=2,
+ vote_weight=1,
+ )
+
+ top_users = {
+ "users": results,
+ }
+
+ cache.set(key=key, val=top_users, namespace=_CACHE_NAMESPACE, time=_DEFAULT_CACHE_EXPIRATION)
+ except db_exceptions.NoDataFoundException:
+ return None
+ return top_users["users"]
diff --git a/critiquebrainz/db/statistics_test.py b/critiquebrainz/db/statistics_test.py
new file mode 100644
index 000000000..252222e17
--- /dev/null
+++ b/critiquebrainz/db/statistics_test.py
@@ -0,0 +1,174 @@
+# critiquebrainz - Repository for Creative Commons licensed reviews
+#
+# Copyright (C) 2019 Bimalkant Lauhny.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+from unittest import mock
+
+import critiquebrainz.db.comment as db_comment
+import critiquebrainz.db.license as db_license
+import critiquebrainz.db.review as db_review
+import critiquebrainz.db.statistics as db_statistics
+import critiquebrainz.db.users as db_users
+import critiquebrainz.db.vote as db_vote
+from critiquebrainz.data.testing import DataTestCase
+from critiquebrainz.db.user import User
+
+
+class StatisticsTestCase(DataTestCase):
+
+ def setUp(self):
+ super(StatisticsTestCase, self).setUp()
+
+ self.user_1 = User(db_users.get_or_create(1, "Tester 1", new_user_data={
+ "display_name": "test user 1",
+ }))
+ self.user_2 = User(db_users.get_or_create(2, "Tester 2", new_user_data={
+ "display_name": "test user 2",
+ }))
+
+ self.license = db_license.create(
+ id=u'Test',
+ full_name=u"Test License",
+ )
+
+ def create_dummy_review(self, user_id):
+ return db_review.create(
+ entity_id="e7aad618-fa86-3983-9e77-405e21796eca",
+ entity_type="release_group",
+ text=u"Test review",
+ rating=5,
+ user_id=user_id,
+ is_draft=False,
+ license_id=self.license["id"],
+ )
+
+ def test_get_users_with_review_count(self):
+ # user_1 added a review
+ self.create_dummy_review(user_id=self.user_1.id)
+
+ # get list of users with review_count
+ users_review_count = db_statistics.get_users_with_review_count()
+ print("User Reviews: ", users_review_count)
+ for user in users_review_count:
+ if str(user["id"]) == self.user_1.id:
+ self.assertEqual(user["review_count"], 1)
+ else:
+ self.assertEqual(user["review_count"], 0)
+
+ def test_get_users_with_comment_count(self):
+ # user_1 added a review
+ review_1 = self.create_dummy_review(user_id=self.user_1.id)
+
+ # user_2 commented on review by user_1
+ db_comment.create(
+ user_id=self.user_2.id,
+ review_id=review_1["id"],
+ text="Test comment",
+ )
+
+ # get list of users with comment_count
+ users_comment_count = db_statistics.get_users_with_comment_count()
+ print("User Comments: ", users_comment_count)
+ for user in users_comment_count:
+ if str(user["id"]) == self.user_2.id:
+ self.assertEqual(user["comment_count"], 1)
+ else:
+ self.assertEqual(user["comment_count"], 0)
+
+ def test_get_users_with_vote_count(self):
+ # user_2 added a review
+ review_2 = self.create_dummy_review(user_id=self.user_2.id)
+
+ # user_1 upvoted review by user_2
+ db_vote.submit(
+ user_id=self.user_1.id,
+ revision_id=review_2["last_revision"]["id"],
+ vote=True,
+ )
+
+ # get list of users with comment_count
+ users_vote_count = db_statistics.get_users_with_vote_count()
+ print("User Votes: ", users_vote_count)
+ for user in users_vote_count:
+ if str(user["id"]) == self.user_1.id:
+ self.assertEqual(user["vote_count"], 1)
+ else:
+ self.assertEqual(user["vote_count"], 0)
+
+ def test_get_top_users(self):
+
+ # user_1 added a review
+ review_1 = self.create_dummy_review(user_id=self.user_1.id)
+
+ # get list of top users
+ top_users = db_statistics.get_top_users()
+ self.assertEqual(len(top_users), 2)
+ self.assertEqual(top_users[0]["id"], self.user_1.id)
+ self.assertEqual(top_users[0]["score"], 1)
+
+ # user_2 added a review
+ self.create_dummy_review(user_id=self.user_2.id)
+
+ # user_2 commented on review by user_1
+ db_comment.create(
+ user_id=self.user_2.id,
+ review_id=review_1["id"],
+ text="Test comment",
+ )
+
+ # get list of top users
+ top_users = db_statistics.get_top_users()
+ self.assertEqual(top_users[0]["id"], self.user_2.id)
+ self.assertEqual(top_users[0]["score"], 2)
+ self.assertEqual(top_users[1]["id"], self.user_1.id)
+ self.assertEqual(top_users[1]["score"], 1)
+
+ @mock.patch('brainzutils.cache.get', return_value=None)
+ @mock.patch('brainzutils.cache.set', return_value=None)
+ def test_get_top_users_overall(self, cache_set, cache_get):
+ # user_1 added a review
+ review_1 = self.create_dummy_review(user_id=self.user_1.id)
+
+ # user_2 added a review
+ review_2 = self.create_dummy_review(user_id=self.user_2.id)
+
+ # user_2 commented on review by user_1
+ db_comment.create(
+ user_id=self.user_2.id,
+ review_id=review_1["id"],
+ text="Test comment",
+ )
+
+ # user_1 upvoted review by user_2
+ db_vote.submit(
+ user_id=self.user_1.id,
+ revision_id=review_2["last_revision"]["id"],
+ vote=True,
+ )
+
+ # get list of top users
+ top_users = db_statistics.get_top_users_overall()
+ expected_key = b"top_users_overall_cb_statistics"
+ cache_get.assert_called_once_with(expected_key,
+ db_statistics._CACHE_NAMESPACE)
+ cache_set.assert_called_once_with(key=expected_key, val={"users": top_users},
+ namespace=db_statistics._CACHE_NAMESPACE,
+ time=db_statistics._DEFAULT_CACHE_EXPIRATION)
+ self.assertEqual(len(top_users), 2)
+ self.assertEqual(top_users[0]["id"], self.user_2.id)
+ self.assertEqual(top_users[0]["score"], 7)
+ self.assertEqual(top_users[1]["id"], self.user_1.id)
+ self.assertEqual(top_users[1]["score"], 6)
diff --git a/critiquebrainz/db/user.py b/critiquebrainz/db/user.py
index 4401ce3db..531bfd114 100644
--- a/critiquebrainz/db/user.py
+++ b/critiquebrainz/db/user.py
@@ -1,11 +1,11 @@
from datetime import date, timedelta
+
from critiquebrainz.data.mixins import AdminMixin
-from critiquebrainz.db import users as db_users
from critiquebrainz.data.user_types import user_types
+from critiquebrainz.db import users as db_users
class User(AdminMixin):
-
# a list of allowed values of `inc` parameter in API calls
allowed_includes = ('user_type', 'stats')
@@ -91,6 +91,7 @@ def get_user_type(user):
for user_type in user_types:
if user_type.is_instance(user):
return user_type
+
if hasattr(self, '_user_type') is False:
self._user_type = get_user_type(self)
return self._user_type
diff --git a/critiquebrainz/db/users.py b/critiquebrainz/db/users.py
index cd874e7ff..ce0e2507b 100644
--- a/critiquebrainz/db/users.py
+++ b/critiquebrainz/db/users.py
@@ -1,7 +1,9 @@
-from datetime import datetime
import uuid
+from datetime import datetime
from hashlib import md5
+
import sqlalchemy
+
from critiquebrainz import db
from critiquebrainz.db import revision as db_revision
@@ -97,7 +99,7 @@ def get_user_by_id(connection, user_id):
result = connection.execute(query, {
"user_id": user_id
- })
+ })
row = result.fetchone()
if not row:
return None
diff --git a/critiquebrainz/db/users_test.py b/critiquebrainz/db/users_test.py
index fc398e32b..02f3ed517 100644
--- a/critiquebrainz/db/users_test.py
+++ b/critiquebrainz/db/users_test.py
@@ -1,16 +1,17 @@
from datetime import datetime, date, timedelta
from uuid import UUID
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.users as db_users
-from critiquebrainz.db.users import gravatar_url, get_many_by_mb_username
-import critiquebrainz.db.review as db_review
-import critiquebrainz.db.spam_report as db_spam_report
-import critiquebrainz.db.vote as db_vote
+
import critiquebrainz.db.comment as db_comment
import critiquebrainz.db.license as db_license
import critiquebrainz.db.oauth_client as db_oauth_client
import critiquebrainz.db.oauth_token as db_oauth_token
+import critiquebrainz.db.review as db_review
+import critiquebrainz.db.spam_report as db_spam_report
+import critiquebrainz.db.users as db_users
+import critiquebrainz.db.vote as db_vote
+from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.db.user import User
+from critiquebrainz.db.users import gravatar_url, get_many_by_mb_username
class UserTestCase(DataTestCase):
@@ -163,7 +164,6 @@ def test_update(self):
self.assertEqual(user1['email'], 'foo@foo.com')
def test_delete(self):
-
user1_id = self.user1.id
db_users.delete(self.user1.id)
# Votes should be deleted as well
diff --git a/critiquebrainz/db/vote.py b/critiquebrainz/db/vote.py
index acbdebd91..3f9eaa1b7 100644
--- a/critiquebrainz/db/vote.py
+++ b/critiquebrainz/db/vote.py
@@ -1,5 +1,7 @@
from datetime import datetime
+
import sqlalchemy
+
from critiquebrainz import db
from critiquebrainz.db import exceptions as db_exceptions
@@ -73,3 +75,14 @@ def delete(user_id, revision_id):
"user_id": user_id,
"revision_id": revision_id,
})
+
+
+def get_count():
+ """Get the total number of votes in CritiqueBrainz.
+ """
+ with db.engine.connect() as connection:
+ result = connection.execute(sqlalchemy.text("""
+ SELECT count(*)
+ FROM vote
+ """))
+ return result.fetchone()[0]
diff --git a/critiquebrainz/db/vote_test.py b/critiquebrainz/db/vote_test.py
index 6df3acff6..e70de8f04 100644
--- a/critiquebrainz/db/vote_test.py
+++ b/critiquebrainz/db/vote_test.py
@@ -1,9 +1,10 @@
from datetime import datetime
from uuid import UUID
-from critiquebrainz.data.testing import DataTestCase
-import critiquebrainz.db.users as db_users
-import critiquebrainz.db.review as db_review
+
import critiquebrainz.db.license as db_license
+import critiquebrainz.db.review as db_review
+import critiquebrainz.db.users as db_users
+from critiquebrainz.data.testing import DataTestCase
from critiquebrainz.db import exceptions
from critiquebrainz.db import vote
from critiquebrainz.db.user import User
@@ -61,3 +62,14 @@ def test_get(self):
"revision_id": self.review["last_revision"]["id"],
"vote": False,
})
+
+ def test_get_count(self):
+ self.assertEqual(vote.get_count(), 0)
+ vote.submit(self.user_1.id, self.review["last_revision"]["id"], True)
+ self.assertEqual(vote.get_count(), 1)
+
+ def test_delete(self):
+ vote.submit(self.user_1.id, self.review["last_revision"]["id"], True)
+ self.assertEqual(vote.get_count(), 1)
+ vote.delete(self.user_1.id, self.review["last_revision"]["id"])
+ self.assertEqual(vote.get_count(), 0)
diff --git a/critiquebrainz/decorators.py b/critiquebrainz/decorators.py
index 952710664..c9440e1a0 100644
--- a/critiquebrainz/decorators.py
+++ b/critiquebrainz/decorators.py
@@ -1,5 +1,6 @@
-from functools import wraps, update_wrapper
from datetime import timedelta
+from functools import wraps, update_wrapper
+
from flask import request, current_app, make_response
@@ -16,6 +17,7 @@ def decorated_function(*args, **kwargs):
for header, value in headers.items():
h[header] = value
return resp
+
return decorated_function
return decorator
@@ -26,6 +28,7 @@ def nocache(f):
@add_response_headers({'Cache-Control': 'no-store'})
def decorated_function(*args, **kwargs):
return f(*args, **kwargs)
+
return decorated_function
@@ -69,4 +72,5 @@ def wrapped_function(*args, **kwargs):
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
+
return decorator
diff --git a/critiquebrainz/frontend/__init__.py b/critiquebrainz/frontend/__init__.py
index 314dd4881..9d2754f99 100644
--- a/critiquebrainz/frontend/__init__.py
+++ b/critiquebrainz/frontend/__init__.py
@@ -2,6 +2,7 @@
import os
import sys
from time import sleep
+
from brainzutils.flask import CustomFlask
from flask import send_from_directory
@@ -32,7 +33,8 @@ def create_app(debug=None, config_path=None):
sleep(1)
if not os.path.exists(config_file):
- print("No configuration file generated yet. Retried {} times, exiting.".format(CONSUL_CONFIG_FILE_RETRY_COUNT))
+ print("No configuration file generated yet. Retried {} times, exiting.".format(
+ CONSUL_CONFIG_FILE_RETRY_COUNT))
sys.exit(-1)
print("Loading consul config file {}".format(config_file))
@@ -63,21 +65,21 @@ def create_app(debug=None, config_path=None):
sentry_config=app.config.get("LOG_SENTRY"),
)
- # Database
- from critiquebrainz.db import init_db_engine
- init_db_engine(app.config.get("SQLALCHEMY_DATABASE_URI"))
+ # CritiqueBrainz Database
+ from critiquebrainz import db as critiquebrainz_db
+ critiquebrainz_db.init_db_engine(app.config.get("SQLALCHEMY_DATABASE_URI"))
add_robots(app)
# MusicBrainz Database
- from critiquebrainz.frontend.external import musicbrainz_db
- musicbrainz_db.init_db_engine(app.config.get('MB_DATABASE_URI'))
+ from brainzutils import musicbrainz_db
+ musicbrainz_db.init_db_engine(app.config.get("MB_DATABASE_URI"))
# Redis (cache)
from brainzutils import cache
if "REDIS_HOST" in app.config and \
- "REDIS_PORT" in app.config and \
- "REDIS_NAMESPACE" in app.config:
+ "REDIS_PORT" in app.config and \
+ "REDIS_NAMESPACE" in app.config:
cache.init(
host=app.config["REDIS_HOST"],
port=app.config["REDIS_PORT"],
@@ -115,11 +117,12 @@ def create_app(debug=None, config_path=None):
# TODO (code-master5): disabled no-member warnings just as a workaround to deal with failing tests till the
# issue [https://github.com/PyCQA/pylint/issues/2563] with pylint is resolved
app.jinja_env.add_extension('jinja2.ext.do')
- from critiquebrainz.utils import reformat_date, reformat_datetime, track_length, parameterize
+ from critiquebrainz.utils import reformat_date, reformat_datetime, track_length, track_length_ms, parameterize
from critiquebrainz.frontend.external.musicbrainz_db.entities import get_entity_by_id
app.jinja_env.filters['date'] = reformat_date
app.jinja_env.filters['datetime'] = reformat_datetime
app.jinja_env.filters['track_length'] = track_length
+ app.jinja_env.filters['track_length_ms'] = track_length_ms
app.jinja_env.filters['parameterize'] = parameterize
app.jinja_env.filters['entity_details'] = get_entity_by_id
from flask_babel import Locale, get_locale
@@ -131,8 +134,10 @@ def create_app(debug=None, config_path=None):
from critiquebrainz.frontend.views.review import review_bp
from critiquebrainz.frontend.views.search import search_bp
from critiquebrainz.frontend.views.artist import artist_bp
+ from critiquebrainz.frontend.views.label import label_bp
from critiquebrainz.frontend.views.release_group import release_group_bp
from critiquebrainz.frontend.views.release import release_bp
+ from critiquebrainz.frontend.views.work import work_bp
from critiquebrainz.frontend.views.event import event_bp
from critiquebrainz.frontend.views.mapping import mapping_bp
from critiquebrainz.frontend.views.user import user_bp
@@ -146,13 +151,16 @@ def create_app(debug=None, config_path=None):
from critiquebrainz.frontend.views.log import log_bp
from critiquebrainz.frontend.views.comment import comment_bp
from critiquebrainz.frontend.views.rate import rate_bp
+ from critiquebrainz.frontend.views.statistics import statistics_bp
app.register_blueprint(frontend_bp)
app.register_blueprint(review_bp, url_prefix='/review')
app.register_blueprint(search_bp, url_prefix='/search')
app.register_blueprint(artist_bp, url_prefix='/artist')
+ app.register_blueprint(label_bp, url_prefix='/label')
app.register_blueprint(release_group_bp, url_prefix='/release-group')
app.register_blueprint(release_bp, url_prefix='/release')
+ app.register_blueprint(work_bp, url_prefix='/work')
app.register_blueprint(event_bp, url_prefix='/event')
app.register_blueprint(place_bp, url_prefix='/place')
app.register_blueprint(mapping_bp, url_prefix='/mapping')
@@ -166,12 +174,12 @@ def create_app(debug=None, config_path=None):
app.register_blueprint(moderators_bp, url_prefix='/moderators')
app.register_blueprint(comment_bp, url_prefix='/comments')
app.register_blueprint(rate_bp, url_prefix='/rate')
+ app.register_blueprint(statistics_bp, url_prefix='/statistics')
return app
def add_robots(app):
-
@app.route('/robots.txt')
def robots_txt(): # pylint: disable=unused-variable
return send_from_directory(app.static_folder, 'robots.txt')
diff --git a/critiquebrainz/frontend/error_handlers.py b/critiquebrainz/frontend/error_handlers.py
index 28c389d68..26720fe2b 100644
--- a/critiquebrainz/frontend/error_handlers.py
+++ b/critiquebrainz/frontend/error_handlers.py
@@ -15,7 +15,6 @@ def get_sentry_event_id():
def init_error_handlers(app):
-
@app.errorhandler(400)
def bad_request(error):
return render_template('errors/400.html', error=error), 400
diff --git a/critiquebrainz/frontend/external/mbspotify.py b/critiquebrainz/frontend/external/mbspotify.py
index 1c2d1c95c..84f9fd818 100644
--- a/critiquebrainz/frontend/external/mbspotify.py
+++ b/critiquebrainz/frontend/external/mbspotify.py
@@ -4,11 +4,13 @@
Source code of mbspotify is available at https://github.com/metabrainz/mbspotify.
"""
import json
+
import requests
-from requests.exceptions import RequestException
-from requests.adapters import HTTPAdapter
-from flask_babel import lazy_gettext
from brainzutils import cache
+from flask_babel import lazy_gettext
+from requests.adapters import HTTPAdapter
+from requests.exceptions import RequestException
+
from critiquebrainz.frontend import flash
_base_url = ""
diff --git a/critiquebrainz/frontend/external/musicbrainz.py b/critiquebrainz/frontend/external/musicbrainz.py
index f4711f69b..a3c625cd4 100644
--- a/critiquebrainz/frontend/external/musicbrainz.py
+++ b/critiquebrainz/frontend/external/musicbrainz.py
@@ -8,7 +8,6 @@
"""
import musicbrainzngs
-
DEFAULT_CACHE_EXPIRATION = 12 * 60 * 60 # seconds (12 hours)
THREAD_POOL_PROCESSES = 10
@@ -44,3 +43,15 @@ def search_places(query='', limit=None, offset=None):
"""Search for places."""
api_resp = musicbrainzngs.search_places(query=query, limit=limit, offset=offset)
return api_resp.get('place-count'), api_resp.get('place-list')
+
+
+def search_works(query='', limit=None, offset=None):
+ """Search for works."""
+ api_resp = musicbrainzngs.search_works(query=query, limit=limit, offset=offset)
+ return api_resp.get('work-count'), api_resp.get('work-list')
+
+
+def search_labels(query='', limit=None, offset=None):
+ """Search for labels."""
+ api_resp = musicbrainzngs.search_labels(query=query, limit=limit, offset=offset)
+ return api_resp.get('label-count'), api_resp.get('label-list')
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/__init__.py b/critiquebrainz/frontend/external/musicbrainz_db/__init__.py
index ac794e086..e23950c1c 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/__init__.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/__init__.py
@@ -1,26 +1 @@
-from typing import Optional
-from contextlib import contextmanager
-from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker, scoped_session, Session
-from sqlalchemy.pool import NullPool
-
-engine = None
-Session: Optional[Session] = None # noqa: F811
DEFAULT_CACHE_EXPIRATION = 12 * 60 * 60 # seconds (12 hours)
-
-
-def init_db_engine(connect_str):
- global engine, Session
- engine = create_engine(connect_str, poolclass=NullPool)
- Session = scoped_session(
- sessionmaker(bind=engine)
- )
-
-
-@contextmanager
-def mb_session():
- session = Session()
- try:
- yield session
- finally:
- session.close()
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/artist.py b/critiquebrainz/frontend/external/musicbrainz_db/artist.py
index 55dc62338..c2c42fbcf 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/artist.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/artist.py
@@ -1,13 +1,8 @@
-from collections import defaultdict
-from sqlalchemy.orm import joinedload
-from mbdata import models
from brainzutils import cache
-from critiquebrainz.frontend.external.musicbrainz_db import mb_session, DEFAULT_CACHE_EXPIRATION
-from critiquebrainz.frontend.external.musicbrainz_db.helpers import get_relationship_info
+from brainzutils.musicbrainz_db import artist as db
+
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
from critiquebrainz.frontend.external.relationships import artist as artist_rel
-from critiquebrainz.frontend.external.musicbrainz_db.utils import get_entities_by_gids
-from critiquebrainz.frontend.external.musicbrainz_db.serialize import to_dict_artists
-from critiquebrainz.frontend.external.musicbrainz_db.includes import check_includes
def get_artist_by_id(mbid):
@@ -18,63 +13,12 @@ def get_artist_by_id(mbid):
Returns:
Dictionary containing the artist information
"""
- key = cache.gen_key(mbid)
+ key = cache.gen_key('artist', mbid)
artist = cache.get(key)
if not artist:
- artist = _get_artist_by_id(mbid)
+ artist = db.fetch_multiple_artists(
+ [mbid],
+ includes=['artist-rels', 'url-rels'],
+ ).get(mbid)
cache.set(key=key, val=artist, time=DEFAULT_CACHE_EXPIRATION)
return artist_rel.process(artist)
-
-
-def _get_artist_by_id(mbid):
- return fetch_multiple_artists(
- [mbid],
- includes=['artist-rels', 'url-rels'],
- ).get(mbid)
-
-
-def fetch_multiple_artists(mbids, *, includes=None):
- """Get info related to multiple artists using their MusicBrainz IDs.
-
- Args:
- mbids (list): List of MBIDs of artists.
- includes (list): List of information to be included.
-
- Returns:
- Dictionary containing info of multiple artists keyed by their mbid.
- """
- if includes is None:
- includes = []
- includes_data = defaultdict(dict)
- check_includes('artist', includes)
- with mb_session() as db:
- query = db.query(models.Artist).\
- options(joinedload("type"))
- artists = get_entities_by_gids(
- query=query,
- entity_type='artist',
- mbids=mbids,
- )
- artist_ids = [artist.id for artist in artists.values()]
-
- if 'artist-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='artist',
- source_type='artist',
- source_entity_ids=artist_ids,
- includes_data=includes_data,
- )
- if 'url-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='url',
- source_type='artist',
- source_entity_ids=artist_ids,
- includes_data=includes_data,
- )
-
- for artist in artists.values():
- includes_data[artist.id]['type'] = artist.type
- artists = {str(mbid): to_dict_artists(artists[mbid], includes_data[artists[mbid].id]) for mbid in mbids}
- return artists
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/entities.py b/critiquebrainz/frontend/external/musicbrainz_db/entities.py
index 60491d19a..b28827627 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/entities.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/entities.py
@@ -1,6 +1,16 @@
-from critiquebrainz.frontend.external.musicbrainz_db.release_group import fetch_multiple_release_groups, get_release_group_by_id
-from critiquebrainz.frontend.external.musicbrainz_db.place import fetch_multiple_places, get_place_by_id
-from critiquebrainz.frontend.external.musicbrainz_db.event import fetch_multiple_events, get_event_by_id
+from brainzutils.musicbrainz_db.artist import fetch_multiple_artists
+from brainzutils.musicbrainz_db.event import fetch_multiple_events
+from brainzutils.musicbrainz_db.label import fetch_multiple_labels
+from brainzutils.musicbrainz_db.place import fetch_multiple_places
+from brainzutils.musicbrainz_db.release_group import fetch_multiple_release_groups
+from brainzutils.musicbrainz_db.work import fetch_multiple_works
+
+from critiquebrainz.frontend.external.musicbrainz_db.artist import get_artist_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.event import get_event_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.label import get_label_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.place import get_place_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.release_group import get_release_group_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.work import get_work_by_id
def get_multiple_entities(entities):
@@ -20,18 +30,30 @@ def get_multiple_entities(entities):
"""
entities_info = {}
release_group_mbids = [entity[0] for entity in filter(lambda entity: entity[1] == 'release_group', entities)]
+ artist_mbids = [entity[0] for entity in filter(lambda entity: entity[1] == 'artist', entities)]
+ label_mbids = [entity[0] for entity in filter(lambda entity: entity[1] == 'label', entities)]
place_mbids = [entity[0] for entity in filter(lambda entity: entity[1] == 'place', entities)]
event_mbids = [entity[0] for entity in filter(lambda entity: entity[1] == 'event', entities)]
+ work_mbids = [entity[0] for entity in filter(lambda entity: entity[1] == 'work', entities)]
entities_info.update(fetch_multiple_release_groups(
release_group_mbids,
includes=['artists'],
))
+ entities_info.update(fetch_multiple_artists(
+ artist_mbids,
+ ))
+ entities_info.update(fetch_multiple_labels(
+ label_mbids,
+ ))
entities_info.update(fetch_multiple_places(
place_mbids,
))
entities_info.update(fetch_multiple_events(
event_mbids,
))
+ entities_info.update(fetch_multiple_works(
+ work_mbids,
+ ))
return entities_info
@@ -39,8 +61,14 @@ def get_entity_by_id(id, type='release_group'):
"""A wrapper to call the correct get_*_by_id function."""
if type == 'release_group':
entity = get_release_group_by_id(str(id))
+ elif type == 'artist':
+ entity = get_artist_by_id(str(id))
+ elif type == 'label':
+ entity = get_label_by_id(str(id))
elif type == 'place':
entity = get_place_by_id(str(id))
elif type == 'event':
entity = get_event_by_id(str(id))
+ elif type == 'work':
+ entity = get_work_by_id(str(id))
return entity
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/event.py b/critiquebrainz/frontend/external/musicbrainz_db/event.py
index 048981a5d..4da4f81ef 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/event.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/event.py
@@ -1,11 +1,7 @@
-from collections import defaultdict
-from mbdata import models
from brainzutils import cache
-from critiquebrainz.frontend.external.musicbrainz_db import mb_session, DEFAULT_CACHE_EXPIRATION
-from critiquebrainz.frontend.external.musicbrainz_db.utils import get_entities_by_gids
-from critiquebrainz.frontend.external.musicbrainz_db.includes import check_includes
-from critiquebrainz.frontend.external.musicbrainz_db.serialize import to_dict_events
-from critiquebrainz.frontend.external.musicbrainz_db.helpers import get_relationship_info
+from brainzutils.musicbrainz_db import event as db
+
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
def get_event_by_id(mbid):
@@ -16,82 +12,12 @@ def get_event_by_id(mbid):
Returns:
Dictionary containing the event information.
"""
- key = cache.gen_key(mbid)
+ key = cache.gen_key('event', mbid)
event = cache.get(key)
if not event:
- event = _get_event_by_id(mbid)
+ event = db.fetch_multiple_events(
+ [mbid],
+ includes=['artist-rels', 'place-rels', 'series-rels', 'url-rels', 'release-group-rels'],
+ ).get(mbid)
cache.set(key=key, val=event, time=DEFAULT_CACHE_EXPIRATION)
return event
-
-
-def _get_event_by_id(mbid):
- return fetch_multiple_events(
- [mbid],
- includes=['artist-rels', 'place-rels', 'series-rels', 'url-rels', 'release-group-rels'],
- ).get(mbid)
-
-
-def fetch_multiple_events(mbids, *, includes=None):
- """Get info related to multiple events using their MusicBrainz IDs.
-
- Args:
- mbids (list): List of MBIDs of events.
- includes (list): List of information to be included.
-
- Returns:
- Dictionary containing info of multiple events keyed by their mbid.
- """
- if includes is None:
- includes = []
- includes_data = defaultdict(dict)
- check_includes('event', includes)
- with mb_session() as db:
- query = db.query(models.Event)
- events = get_entities_by_gids(
- query=query,
- entity_type='event',
- mbids=mbids,
- )
- event_ids = [event.id for event in events.values()]
-
- if 'artist-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='artist',
- source_type='event',
- source_entity_ids=event_ids,
- includes_data=includes_data,
- )
- if 'place-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='place',
- source_type='event',
- source_entity_ids=event_ids,
- includes_data=includes_data,
- )
- if 'series-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='series',
- source_type='event',
- source_entity_ids=event_ids,
- includes_data=includes_data,
- )
- if 'url-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='url',
- source_type='event',
- source_entity_ids=event_ids,
- includes_data=includes_data,
- )
- if 'release-group-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='release_group',
- source_type='event',
- source_entity_ids=event_ids,
- includes_data=includes_data,
- )
- return {str(mbid): to_dict_events(events[mbid], includes_data[events[mbid].id]) for mbid in mbids}
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/helpers.py b/critiquebrainz/frontend/external/musicbrainz_db/helpers.py
index 2ddd90ab1..6be40b2dd 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/helpers.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/helpers.py
@@ -1,7 +1,8 @@
-from mbdata.utils.models import get_link_model
from mbdata.models import Tag
-from sqlalchemy.orm import joinedload
+from mbdata.utils.models import get_link_model
from sqlalchemy import func
+from sqlalchemy.orm import joinedload
+
from critiquebrainz.frontend.external.musicbrainz_db.utils import ENTITY_MODELS
@@ -22,17 +23,19 @@ def get_relationship_info(*, db, target_type, source_type, source_entity_ids, in
target_model = ENTITY_MODELS[target_type]
relation = get_link_model(source_model, target_model)
- query = db.query(relation).\
- options(joinedload("link", innerjoin=True)).\
+ query = db.query(relation). \
+ options(joinedload("link", innerjoin=True)). \
options(joinedload("link.link_type", innerjoin=True))
if relation.entity0.property.mapper.class_ == relation.entity1.property.mapper.class_:
_relationship_link_helper(relation, query, "entity0", "entity1", target_type, source_entity_ids, includes_data)
_relationship_link_helper(relation, query, "entity1", "entity0", target_type, source_entity_ids, includes_data)
else:
if source_model == relation.entity0.property.mapper.class_:
- _relationship_link_helper(relation, query, "entity0", "entity1", target_type, source_entity_ids, includes_data)
+ _relationship_link_helper(relation, query, "entity0", "entity1", target_type, source_entity_ids,
+ includes_data)
else:
- _relationship_link_helper(relation, query, "entity1", "entity0", target_type, source_entity_ids, includes_data)
+ _relationship_link_helper(relation, query, "entity1", "entity0", target_type, source_entity_ids,
+ includes_data)
def _relationship_link_helper(relation, query, source_attr, target_attr, target_type, source_entity_ids, includes_data):
@@ -55,7 +58,7 @@ def _relationship_link_helper(relation, query, source_attr, target_attr, target_
query = query.options(joinedload(target_attr, innerjoin=True))
relation_type = target_type + "-rels"
for link in query:
- includes_data[getattr(link, source_id_attr)].setdefault('relationship_objs', {}).\
+ includes_data[getattr(link, source_id_attr)].setdefault('relationship_objs', {}). \
setdefault(relation_type, []).append(link)
@@ -72,10 +75,10 @@ def get_tags(*, db, entity_model, tag_model, foreign_tag_id, entity_ids):
Returns:
List of tuples containing the entity_ids and the list of associated tags.
"""
- tags = db.query(entity_model.id, func.array_agg(Tag.name)).\
- join(tag_model, entity_model.id == foreign_tag_id).\
- join(Tag).\
- filter(entity_model.id.in_(entity_ids)).\
- group_by(entity_model.id).\
+ tags = db.query(entity_model.id, func.array_agg(Tag.name)). \
+ join(tag_model, entity_model.id == foreign_tag_id). \
+ join(Tag). \
+ filter(entity_model.id.in_(entity_ids)). \
+ group_by(entity_model.id). \
all()
return tags
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/includes.py b/critiquebrainz/frontend/external/musicbrainz_db/includes.py
index 325d83ea9..5626db772 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/includes.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/includes.py
@@ -21,9 +21,8 @@
'place': ["aliases", "annotation"] + RELATION_INCLUDES + TAG_INCLUDES,
'event': ["aliases"] + RELATION_INCLUDES + TAG_INCLUDES,
'release_group': ["artists", "media", "releases"] + TAG_INCLUDES + RELATION_INCLUDES,
- 'release': [
- "artists", "labels", "recordings", "release-groups", "media", "annotation", "aliases"
- ] + TAG_INCLUDES + RELATION_INCLUDES,
+ 'release': ["artists", "labels", "recordings", "release-groups", "media", "annotation", "aliases"]
+ + TAG_INCLUDES + RELATION_INCLUDES,
'artist': ["recordings", "releases", "media", "aliases", "annotation"] + RELATION_INCLUDES + TAG_INCLUDES,
}
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/label.py b/critiquebrainz/frontend/external/musicbrainz_db/label.py
new file mode 100644
index 000000000..7eaab0d3a
--- /dev/null
+++ b/critiquebrainz/frontend/external/musicbrainz_db/label.py
@@ -0,0 +1,24 @@
+from brainzutils import cache
+from brainzutils.musicbrainz_db import label as db
+
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
+from critiquebrainz.frontend.external.relationships import label as label_rel
+
+
+def get_label_by_id(mbid):
+ """Get label with MusicBrainz ID.
+
+ Args:
+ mbid (uuid): MBID(gid) of the label.
+ Returns:
+ Dictionary containing the label information
+ """
+ key = cache.gen_key('label', mbid)
+ label = cache.get(key)
+ if not label:
+ label = db.fetch_multiple_labels(
+ [mbid],
+ includes=['artist-rels', 'url-rels'],
+ ).get(mbid)
+ cache.set(key=key, val=label, time=DEFAULT_CACHE_EXPIRATION)
+ return label_rel.process(label)
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/place.py b/critiquebrainz/frontend/external/musicbrainz_db/place.py
index 01b274208..10d803f31 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/place.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/place.py
@@ -1,13 +1,8 @@
-from collections import defaultdict
-from mbdata import models
-from sqlalchemy.orm import joinedload
from brainzutils import cache
-from critiquebrainz.frontend.external.musicbrainz_db import mb_session, DEFAULT_CACHE_EXPIRATION
-from critiquebrainz.frontend.external.musicbrainz_db.includes import check_includes
-from critiquebrainz.frontend.external.musicbrainz_db.serialize import to_dict_places
-from critiquebrainz.frontend.external.musicbrainz_db.helpers import get_relationship_info
+from brainzutils.musicbrainz_db import place as db
+
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
from critiquebrainz.frontend.external.relationships import place as place_rel
-from critiquebrainz.frontend.external.musicbrainz_db.utils import get_entities_by_gids
def get_place_by_id(mbid):
@@ -18,73 +13,12 @@ def get_place_by_id(mbid):
Returns:
Dictionary containing the place information.
"""
- key = cache.gen_key(mbid)
+ key = cache.gen_key('place', mbid)
place = cache.get(key)
if not place:
- place = _get_place_by_id(mbid)
+ place = db.fetch_multiple_places(
+ [mbid],
+ includes=['artist-rels', 'place-rels', 'release-group-rels', 'url-rels'],
+ ).get(mbid)
cache.set(key=key, val=place, time=DEFAULT_CACHE_EXPIRATION)
return place_rel.process(place)
-
-
-def _get_place_by_id(mbid):
- return fetch_multiple_places(
- [mbid],
- includes=['artist-rels', 'place-rels', 'release-group-rels', 'url-rels'],
- ).get(mbid)
-
-
-def fetch_multiple_places(mbids, *, includes=None):
- """Get info related to multiple places using their MusicBrainz IDs.
-
- Args:
- mbids (list): List of MBIDs of places.
- includes (list): List of information to be included.
-
- Returns:
- Dictionary containing info of multiple places keyed by their mbid.
- """
- if includes is None:
- includes = []
- includes_data = defaultdict(dict)
- check_includes('place', includes)
- with mb_session() as db:
- query = db.query(models.Place).\
- options(joinedload("area")).\
- options(joinedload("type"))
- places = get_entities_by_gids(
- query=query,
- entity_type='place',
- mbids=mbids,
- )
- place_ids = [place.id for place in places.values()]
-
- if 'artist-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='artist',
- source_type='place',
- source_entity_ids=place_ids,
- includes_data=includes_data,
- )
- if 'place-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='place',
- source_type='place',
- source_entity_ids=place_ids,
- includes_data=includes_data,
- )
- if 'url-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='url',
- source_type='place',
- source_entity_ids=place_ids,
- includes_data=includes_data,
- )
-
- for place in places.values():
- includes_data[place.id]['area'] = place.area
- includes_data[place.id]['type'] = place.type
- places = {str(mbid): to_dict_places(places[mbid], includes_data[places[mbid].id]) for mbid in mbids}
- return places
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/release.py b/critiquebrainz/frontend/external/musicbrainz_db/release.py
index 67152dbf4..86fc843c9 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/release.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/release.py
@@ -1,109 +1,23 @@
-from collections import defaultdict
-from mbdata import models
-from sqlalchemy.orm import joinedload
from brainzutils import cache
-from critiquebrainz.frontend.external.musicbrainz_db import mb_session, DEFAULT_CACHE_EXPIRATION
-from critiquebrainz.frontend.external.musicbrainz_db.includes import check_includes
-from critiquebrainz.frontend.external.musicbrainz_db.serialize import to_dict_releases
-from critiquebrainz.frontend.external.musicbrainz_db.utils import get_entities_by_gids
-from critiquebrainz.frontend.external.musicbrainz_db.helpers import get_relationship_info
+from brainzutils.musicbrainz_db import release as db
+
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
def get_release_by_id(mbid):
- """Get release with the MusicBrainz ID.
+ """Get release with MusicBrainz ID.
Args:
mbid (uuid): MBID(gid) of the release.
Returns:
- Dictionary containing the release information.
+ Dictionary containing the release information
"""
- key = cache.gen_key(mbid)
+ key = cache.gen_key('release', mbid)
release = cache.get(key)
if not release:
- release = _get_release_by_id(mbid)
+ release = db.fetch_multiple_releases(
+ [mbid],
+ includes=['media', 'release-groups'],
+ ).get(mbid)
cache.set(key=key, val=release, time=DEFAULT_CACHE_EXPIRATION)
return release
-
-
-def _get_release_by_id(mbid):
- return fetch_multiple_releases(
- [mbid],
- includes=['media', 'release-groups'],
- ).get(mbid)
-
-
-def fetch_multiple_releases(mbids, *, includes=None):
- """Get info related to multiple releases using their MusicBrainz IDs.
-
- Args:
- mbids (list): List of MBIDs of releases.
- includes (list): List of information to be included.
-
- Returns:
- Dictionary containing info of multiple releases keyed by their mbid.
- """
- if includes is None:
- includes = []
- includes_data = defaultdict(dict)
- check_includes('release', includes)
- with mb_session() as db:
- query = db.query(models.Release)
- if 'release-groups' in includes:
- query = query.options(joinedload('release_group'))
- if 'media' in includes:
- # Fetch media with tracks
- query = query.options(joinedload('mediums')).\
- options(joinedload('mediums.tracks')).\
- options(joinedload('mediums.format')).\
- options(joinedload('mediums.tracks.recording'))
- releases = get_entities_by_gids(
- query=query,
- entity_type='release',
- mbids=mbids,
- )
- release_ids = [release.id for release in releases.values()]
-
- if 'release-groups' in includes:
- for release in releases.values():
- includes_data[release.id]['release-groups'] = release.release_group
-
- if 'media' in includes:
- for release in releases.values():
- includes_data[release.id]['media'] = release.mediums
-
- if 'url-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='url',
- source_type='release',
- source_entity_ids=release_ids,
- includes_data=includes_data,
- )
- releases = {str(mbid): to_dict_releases(releases[mbid], includes_data[releases[mbid].id]) for mbid in mbids}
- return releases
-
-
-def browse_releases(*, release_group_id, includes=None):
- """Get all the releases by a certain release group.
- You need to provide the Release Group's MusicBrainz ID.
- """
- if includes is None:
- includes = []
- with mb_session() as db:
- release_ids = db.query(models.Release.gid).\
- join(models.ReleaseGroup).\
- filter(models.ReleaseGroup.gid == release_group_id).all()
- release_ids = [release_id[0] for release_id in release_ids]
- releases = fetch_multiple_releases(release_ids, includes=includes)
- return releases
-
-
-def get_url_rels_from_releases(releases):
- """Returns all url-rels for a list of releases in a single list (of url-rel dictionaries)
- Typical usage with browse_releases()
- """
- all_url_rels = []
- for release_gid in releases.keys():
- if 'url-rels' in releases[release_gid]:
- all_url_rels.extend([url_rel for url_rel in releases[release_gid]['url-rels']])
- return all_url_rels
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/release_group.py b/critiquebrainz/frontend/external/musicbrainz_db/release_group.py
index bc2aac3d1..253b64b17 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/release_group.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/release_group.py
@@ -1,112 +1,23 @@
-from collections import defaultdict
from brainzutils import cache
-from sqlalchemy.orm import joinedload
-from sqlalchemy import case
-from mbdata import models
-from critiquebrainz.frontend.external.musicbrainz_db import mb_session, DEFAULT_CACHE_EXPIRATION
-from critiquebrainz.frontend.external.musicbrainz_db.helpers import get_relationship_info, get_tags
-from critiquebrainz.frontend.external.musicbrainz_db.serialize import to_dict_release_groups
-from critiquebrainz.frontend.external.musicbrainz_db.includes import check_includes
-from critiquebrainz.frontend.external.musicbrainz_db.utils import get_entities_by_gids
+from brainzutils.musicbrainz_db import release_group as db
+
import critiquebrainz.frontend.external.relationships.release_group as release_group_rel
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
def get_release_group_by_id(mbid):
"""Get release group using the MusicBrainz ID."""
- key = cache.gen_key(mbid)
+ key = cache.gen_key('release-group', mbid)
release_group = cache.get(key)
if not release_group:
- release_group = _get_release_group_by_id(mbid)
+ release_group = db.fetch_multiple_release_groups(
+ [mbid],
+ includes=['artists', 'releases', 'release-group-rels', 'url-rels', 'tags'],
+ )[mbid]
cache.set(key=key, val=release_group, time=DEFAULT_CACHE_EXPIRATION)
return release_group_rel.process(release_group)
-def _get_release_group_by_id(mbid):
- return fetch_multiple_release_groups(
- [mbid],
- includes=['artists', 'releases', 'release-group-rels', 'url-rels', 'tags'],
- )[mbid]
-
-
-def fetch_multiple_release_groups(mbids, *, includes=None):
- includes = [] if includes is None else includes
- includes_data = defaultdict(dict)
- check_includes('release_group', includes)
- with mb_session() as db:
- # Join table meta which contains release date for a release group
- query = db.query(models.ReleaseGroup).options(joinedload("meta")).\
- options(joinedload("type"))
-
- if 'artists' in includes:
- query = query.\
- options(joinedload("artist_credit")).\
- options(joinedload("artist_credit.artists")).\
- options(joinedload("artist_credit.artists.artist"))
-
- release_groups = get_entities_by_gids(
- query=query,
- entity_type='release_group',
- mbids=mbids,
- )
- release_group_ids = [release_group.id for release_group in release_groups.values()]
-
- if 'artists' in includes:
- for release_group in release_groups.values():
- artist_credit_names = release_group.artist_credit.artists
- includes_data[release_group.id]['artist-credit-names'] = artist_credit_names
- includes_data[release_group.id]['artist-credit-phrase'] = release_group.artist_credit.name
-
- if 'releases' in includes:
- query = db.query(models.Release).filter(getattr(models.Release, "release_group_id").in_(release_group_ids))
- for release in query:
- includes_data[release.release_group_id].setdefault('releases', []).append(release)
-
- if 'release-group-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='release_group',
- source_type='release_group',
- source_entity_ids=release_group_ids,
- includes_data=includes_data,
- )
-
- if 'url-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='url',
- source_type='release_group',
- source_entity_ids=release_group_ids,
- includes_data=includes_data,
- )
-
- if 'work-rels' in includes:
- get_relationship_info(
- db=db,
- target_type='work',
- source_type='release_group',
- source_entity_ids=release_group_ids,
- includes_data=includes_data,
- )
-
- if 'tags' in includes:
- release_group_tags = get_tags(
- db=db,
- entity_model=models.ReleaseGroup,
- tag_model=models.ReleaseGroupTag,
- foreign_tag_id=models.ReleaseGroupTag.release_group_id,
- entity_ids=release_group_ids,
- )
- for release_group_id, tags in release_group_tags:
- includes_data[release_group_id]['tags'] = tags
-
- for release_group in release_groups.values():
- includes_data[release_group.id]['meta'] = release_group.meta
- includes_data[release_group.id]['type'] = release_group.type
- release_groups = {str(mbid): to_dict_release_groups(release_groups[mbid], includes_data[release_groups[mbid].id])
- for mbid in mbids}
- return release_groups
-
-
def browse_release_groups(*, artist_id, release_types=None, limit=None, offset=None):
"""Get all release groups linked to an artist.
@@ -121,32 +32,17 @@ def browse_release_groups(*, artist_id, release_types=None, limit=None, offset=N
and the total count of the release groups.
"""
artist_id = str(artist_id)
- includes_data = defaultdict(dict)
if release_types is None:
release_types = []
release_types = [release_type.capitalize() for release_type in release_types]
key = cache.gen_key(artist_id, limit, offset, *release_types)
release_groups = cache.get(key)
if not release_groups:
- with mb_session() as db:
- release_groups_query = _browse_release_groups_query(db, artist_id, release_types)
- count = release_groups_query.count()
- release_groups = release_groups_query.order_by(
- case([(models.ReleaseGroupMeta.first_release_date_year.is_(None), 1)], else_=0),
- models.ReleaseGroupMeta.first_release_date_year.desc()
- ).limit(limit).offset(offset).all()
- for release_group in release_groups:
- includes_data[release_group.id]['meta'] = release_group.meta
- release_groups = ([to_dict_release_groups(release_group, includes_data[release_group.id])
- for release_group in release_groups], count)
+ release_groups = db.get_release_groups_for_artist(
+ artist_id=artist_id,
+ release_types=release_types,
+ limit=limit,
+ offset=offset
+ )
cache.set(key=key, val=release_groups, time=DEFAULT_CACHE_EXPIRATION)
return release_groups
-
-
-def _browse_release_groups_query(db, artist_id, release_types):
- return db.query(models.ReleaseGroup).\
- options(joinedload('meta')).\
- join(models.ReleaseGroupPrimaryType).join(models.ReleaseGroupMeta).\
- join(models.ArtistCreditName, models.ArtistCreditName.artist_credit_id == models.ReleaseGroup.artist_credit_id).\
- join(models.Artist, models.Artist.id == models.ArtistCreditName.artist_id).\
- filter(models.Artist.gid == artist_id).filter(models.ReleaseGroupPrimaryType.name.in_(release_types))
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/serialize.py b/critiquebrainz/frontend/external/musicbrainz_db/serialize.py
index ca0c54eec..aae40c6f0 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/serialize.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/serialize.py
@@ -1,4 +1,5 @@
from mbdata.utils.models import get_link_target
+
from critiquebrainz.frontend.external.musicbrainz_db.utils import ENTITY_MODELS
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/test_data.py b/critiquebrainz/frontend/external/musicbrainz_db/test_data.py
deleted file mode 100644
index 5ac85ffdd..000000000
--- a/critiquebrainz/frontend/external/musicbrainz_db/test_data.py
+++ /dev/null
@@ -1,418 +0,0 @@
-import datetime
-from mbdata.models import (
- LinkPlaceURL,
- LinkType,
- Place,
- PlaceType,
- LinkPlacePlace,
- URL,
- Area,
- Link,
- Artist,
- ArtistCredit,
- ArtistCreditName,
- ArtistType,
- Medium,
- MediumFormat,
- Recording,
- Release,
- ReleaseGroup,
- ReleaseGroupMeta,
- ReleaseGroupPrimaryType,
- ReleaseStatus,
- Script,
- Event,
- EventType,
- Track,
-)
-
-# Place (d71ffe38-5eaf-426b-9a2e-e1f21bc84609) with url-rels, place-rels
-area_hameenlinna = Area()
-area_hameenlinna.id = 9598
-area_hameenlinna.gid = '4479c385-74d8-4a2b-bdab-f48d1e6969ba'
-area_hameenlinna.name = 'Hämeenlinna'
-area_hameenlinna.ended = False
-area_hameenlinna.comment = ''
-
-placetype_venue = PlaceType()
-placetype_venue.id = 2
-placetype_venue.name = 'Venue'
-placetype_venue.description = 'A place that has live artistic performances.'
-placetype_venue.gid = 'cd92781a-a73f-30e8-a430-55d7521338db'
-
-place_suisto = Place()
-place_suisto.id = 955
-place_suisto.gid = 'd71ffe38-5eaf-426b-9a2e-e1f21bc84609'
-place_suisto.name = 'Suisto'
-place_suisto.address = 'Verkatehtaankuja 7, FI-13200 Hämeenlinna, Finland'
-place_suisto.coordinates = (60.997758, 24.477142)
-place_suisto.comment = ''
-place_suisto.begin_date_year = 2009
-place_suisto.ended = False
-place_suisto.area = area_hameenlinna
-place_suisto.type = placetype_venue
-
-url_1 = URL()
-url_1.id = 2003126
-url_1.gid = '7462ea62-7439-47f7-93bc-a425d1d989e8'
-url_1.url = 'http://www.suisto.fi/'
-
-linktype_official_homepage = LinkType()
-linktype_official_homepage.id = 363
-linktype_official_homepage.gid = '696b79da-7e45-40e6-a9d4-b31438eb7e5d'
-linktype_official_homepage.entity_type0 = 'place'
-linktype_official_homepage.entity_type1 = 'url'
-linktype_official_homepage.name = 'official homepage'
-linktype_official_homepage.description = 'Indicates the official homepage for a place.'
-linktype_official_homepage.link_phrase = 'official homepages'
-linktype_official_homepage.reverse_link_phrase = 'official homepage for'
-linktype_official_homepage.long_link_phrase = 'has an official homepage at'
-linktype_official_homepage.is_deprecated = False
-linktype_official_homepage.has_dates = True
-linktype_official_homepage.entity0_cardinality = 0
-linktype_official_homepage.entity1_cardinality = 0
-
-link_3 = Link()
-link_3.id = 133735
-link_3.attribute_count = 0
-link_3.created = datetime.datetime(2013, 10, 17, 14, 56, 42, 321443)
-link_3.ended = False
-link_3.link_type = linktype_official_homepage
-
-linkplaceurl_1 = LinkPlaceURL()
-linkplaceurl_1.id = 502
-linkplaceurl_1.link_order = 0
-linkplaceurl_1.entity0 = place_suisto
-linkplaceurl_1.entity1 = url_1
-linkplaceurl_1.entity0_id = place_suisto.id
-linkplaceurl_1.entity1_id = url_1.id
-linkplaceurl_1.link = link_3
-
-url_2 = URL()
-url_2.id = 2003133
-url_2.gid = '8de22e00-c8e8-475f-814e-160ef761da63'
-url_2.url = 'https://twitter.com/Suisto'
-
-linktype_social_network = LinkType()
-linktype_social_network.id = 429
-linktype_social_network.child_order = 0
-linktype_social_network.gid = '040de4d5-ace5-4cfb-8a45-95c5c73bce01'
-linktype_social_network.entity_type0 = 'place'
-linktype_social_network.entity_type1 = 'url'
-linktype_social_network.name = 'social network'
-linktype_social_network.description = 'A social network description.'
-linktype_social_network.link_phrase = 'social networking'
-linktype_social_network.reverse_link_phrase = 'social networking page for'
-linktype_social_network.long_link_phrase = 'has a social networking page at'
-linktype_social_network.is_deprecated = False
-linktype_social_network.has_dates = True
-linktype_social_network.entity0_cardinality = 0
-linktype_social_network.entity1_cardinality = 0
-
-link_4 = Link()
-link_4.id = 133745
-link_4.attribute_count = 0
-link_4.created = datetime.datetime(2013, 10, 17, 15, 6, 28, 583800)
-link_4.ended = False
-link_4.link_type = linktype_social_network
-
-linkplaceurl_2 = LinkPlaceURL()
-linkplaceurl_2.id = 507
-linkplaceurl_2.entity0 = place_suisto
-linkplaceurl_2.entity1 = url_2
-linkplaceurl_2.entity0_id = place_suisto.id
-linkplaceurl_2.entity1_id = url_2.id
-linkplaceurl_2.link = link_4
-
-place_verkatehdas = Place()
-place_verkatehdas.id = 734
-place_verkatehdas.gid = 'f9587914-8505-4bd1-833b-16a3100a4948'
-place_verkatehdas.name = 'Verkatehdas'
-place_verkatehdas.address = 'Paasikiventie 2, FI-13200 Hämeenlinna, Finland'
-place_verkatehdas.coordinates = (60.99727, 24.47651)
-place_verkatehdas.comment = ''
-place_verkatehdas.ended = False
-place_verkatehdas.area = area_hameenlinna
-place_verkatehdas.type = placetype_venue
-
-linktype_parts = LinkType()
-linktype_parts.id = 717
-linktype_parts.child_order = 0
-linktype_parts.gid = 'ff683f48-eff1-40ab-a58f-b128098ffe92'
-linktype_parts.entity_type0 = 'place'
-linktype_parts.entity_type1 = 'place'
-linktype_parts.name = 'parts'
-linktype_parts.description = 'This indicates that a place is part of another place.'
-linktype_parts.link_phrase = 'parts'
-linktype_parts.reverse_link_phrase = 'part of'
-linktype_parts.long_link_phrase = 'has part'
-linktype_parts.is_deprecated = False
-linktype_parts.has_dates = True
-
-link_1 = Link()
-link_1.id = 138113
-link_1.attribute_count = 0
-link_1.ended = False
-link_1.link_type = linktype_parts
-
-linkplaceplace_1 = LinkPlacePlace()
-linkplaceplace_1.id = 47
-linkplaceplace_1.link_order = 0
-linkplaceplace_1.entity0_credit = ''
-linkplaceplace_1.entity1_credit = ''
-linkplaceplace_1.entity0 = place_verkatehdas
-linkplaceplace_1.entity1 = place_suisto
-linkplaceplace_1.link = link_1
-
-# Release (16bee711-d7ce-48b0-adf4-51f124bcc0df) with release group(with its artist credit), medium,
-# tracks and recordings
-artisttype_person = ArtistType()
-artisttype_person.id = 1
-artisttype_person.name = 'Person'
-artisttype_person.gid = 'b6e035f4-3ce9-331c-97df-83397230b0df'
-
-artist_jay_z = Artist()
-artist_jay_z.id = 167
-artist_jay_z.gid = 'f82bcf78-5b69-4622-a5ef-73800768d9ac'
-artist_jay_z.name = 'JAY Z'
-artist_jay_z.sort_name = 'JAY Z'
-artist_jay_z.begin_date_year = 1969
-artist_jay_z.begin_date_month = 12
-artist_jay_z.begin_date_day = 4
-artist_jay_z.comment = 'US rapper, formerly Jay-Z'
-artist_jay_z.ended = False
-artist_jay_z.type = artisttype_person
-
-artistcreditname_jay_z = ArtistCreditName()
-artistcreditname_jay_z.position = 0
-artistcreditname_jay_z.name = 'Jay-Z'
-artistcreditname_jay_z.join_phrase = '/'
-artistcreditname_jay_z.artist = artist_jay_z
-
-artisttype_group = ArtistType()
-artisttype_group.id = 2
-artisttype_group.name = 'Group'
-artisttype_group.child_order = 2
-artisttype_group.gid = 'e431f5f6-b5d2-343d-8b36-72607fffb74b'
-
-artist_linkin_park = Artist()
-artist_linkin_park.id = 11330
-artist_linkin_park.gid = 'f59c5520-5f46-4d2c-b2c4-822eabf53419'
-artist_linkin_park.name = 'Linkin Park'
-artist_linkin_park.sort_name = 'Linkin Park'
-artist_linkin_park.begin_date_year = 1995
-artist_linkin_park.comment = ''
-artist_linkin_park.ended = False
-artist_linkin_park.type = artisttype_group
-
-artistcreditname_linkin_park = ArtistCreditName()
-artistcreditname_linkin_park.position = 1
-artistcreditname_linkin_park.name = 'Linkin Park'
-artistcreditname_linkin_park.join_phrase = ''
-artistcreditname_linkin_park.artist = artist_linkin_park
-
-artistcredit_jay_z_linkin_park = ArtistCredit()
-artistcredit_jay_z_linkin_park.id = 1617798
-artistcredit_jay_z_linkin_park.name = 'Jay-Z/Linkin Park'
-artistcredit_jay_z_linkin_park.artist_count = 2
-artistcredit_jay_z_linkin_park.ref_count = 5
-artistcredit_jay_z_linkin_park.created = datetime.datetime(2016, 2, 28, 21, 42, 14, 873583)
-artistcredit_jay_z_linkin_park.artists = [
- artistcreditname_jay_z,
- artistcreditname_linkin_park,
-]
-
-mediumformat_cd = MediumFormat()
-mediumformat_cd.id = 1
-mediumformat_cd.name = 'CD'
-mediumformat_cd.year = 1982
-mediumformat_cd.gid = '9712d52a-4509-3d4b-a1a2-67c88c643e31'
-
-recording_numb_encore_explicit = Recording()
-recording_numb_encore_explicit.id = 3094737
-recording_numb_encore_explicit.gid = 'daccb724-8023-432a-854c-e0accb6c8678'
-recording_numb_encore_explicit.name = 'Numb/Encore (explicit)'
-recording_numb_encore_explicit.length = 205280
-recording_numb_encore_explicit.comment = ''
-recording_numb_encore_explicit.video = False
-recording_numb_encore_explicit.artist_credit = artistcredit_jay_z_linkin_park
-
-track_numb_encore_explicit = Track()
-track_numb_encore_explicit.id = 20280427
-track_numb_encore_explicit.gid = 'dfe024b2-95b2-453f-b03e-3b9fa06f44e6'
-track_numb_encore_explicit.position = 1
-track_numb_encore_explicit.number = '1'
-track_numb_encore_explicit.name = 'Numb/Encore (explicit)'
-track_numb_encore_explicit.length = 207000
-track_numb_encore_explicit.is_data_track = False
-track_numb_encore_explicit.artist_credit = artistcredit_jay_z_linkin_park
-track_numb_encore_explicit.recording = recording_numb_encore_explicit
-
-recording_numb_encore_instrumental = Recording()
-recording_numb_encore_instrumental.id = 3094739
-recording_numb_encore_instrumental.gid = '965b75df-397d-4395-aac8-de11854c4630'
-recording_numb_encore_instrumental.name = 'Numb/Encore (instrumental)'
-recording_numb_encore_instrumental.length = 207333
-recording_numb_encore_instrumental.comment = ''
-recording_numb_encore_instrumental.video = False
-recording_numb_encore_instrumental.artist_credit = artistcredit_jay_z_linkin_park
-
-track_numb_encore_instrumental = Track()
-track_numb_encore_instrumental.id = 20280428
-track_numb_encore_instrumental.gid = '4fd6d4b0-0d14-428a-a554-1052060a9a27'
-track_numb_encore_instrumental.position = 2
-track_numb_encore_instrumental.number = '2'
-track_numb_encore_instrumental.name = 'Numb/Encore (instrumental)'
-track_numb_encore_instrumental.length = 206000
-track_numb_encore_instrumental.is_data_track = False
-track_numb_encore_instrumental.artist_credit = artistcredit_jay_z_linkin_park
-track_numb_encore_instrumental.recording = recording_numb_encore_instrumental
-
-medium_1 = Medium()
-medium_1.id = 1842217
-medium_1.position = 1
-medium_1.name = ''
-medium_1.track_count = 2
-medium_1.format = mediumformat_cd
-medium_1.tracks = [
- track_numb_encore_explicit,
- track_numb_encore_instrumental,
-]
-
-releasegroupprimarytype_single = ReleaseGroupPrimaryType()
-releasegroupprimarytype_single.id = 2
-releasegroupprimarytype_single.name = 'Single'
-releasegroupprimarytype_single.child_order = 2
-releasegroupprimarytype_single.gid = 'd6038452-8ee0-3f68-affc-2de9a1ede0b9'
-
-releasegroupmeta = ReleaseGroupMeta()
-releasegroupmeta.release_count = 3
-releasegroupmeta.first_release_date_year = 2004
-releasegroupmeta.rating = 100
-
-releasegroup_numb_encore = ReleaseGroup()
-releasegroup_numb_encore.id = 828504
-releasegroup_numb_encore.gid = '7c1014eb-454c-3867-8854-3c95d265f8de'
-releasegroup_numb_encore.name = 'Numb/Encore'
-releasegroup_numb_encore.artist_credit = artistcredit_jay_z_linkin_park
-releasegroup_numb_encore.meta = releasegroupmeta
-releasegroup_numb_encore.type = releasegroupprimarytype_single
-
-script_latin = Script()
-script_latin.id = 28
-script_latin.iso_code = 'Latn'
-script_latin.iso_number = '215'
-script_latin.name = 'Latin'
-script_latin.frequency = 4
-
-releasestatus_official = ReleaseStatus()
-releasestatus_official.id = 1
-releasestatus_official.name = 'Official'
-releasestatus_official.description = 'Description for an official release.'
-releasestatus_official.gid = '4e304316-386d-3409-af2e-78857eec5cfe'
-
-release_numb_encore = Release()
-release_numb_encore.id = 1738247
-release_numb_encore.gid = '16bee711-d7ce-48b0-adf4-51f124bcc0df'
-release_numb_encore.name = 'Numb/Encore'
-release_numb_encore.barcode = '054391612328'
-release_numb_encore.comment = ''
-release_numb_encore.artist_credit = artistcredit_jay_z_linkin_park
-release_numb_encore.mediums = [
- medium_1,
-]
-release_numb_encore.release_group = releasegroup_numb_encore
-release_numb_encore.status = releasestatus_official
-
-track_numb_encore_explicit_1 = Track()
-track_numb_encore_explicit_1.id = 7878846
-track_numb_encore_explicit_1.gid = '13aa9571-c0a0-3aaf-8159-9511658e5978'
-track_numb_encore_explicit_1.position = 1
-track_numb_encore_explicit_1.number = '1'
-track_numb_encore_explicit_1.name = 'Numb/Encore (explicit)'
-track_numb_encore_explicit_1.length = 208253
-track_numb_encore_explicit_1.is_data_track = False
-track_numb_encore_explicit_1.artist_credit = artistcredit_jay_z_linkin_park
-track_numb_encore_explicit_1.recording = recording_numb_encore_explicit
-
-track_numb_encore_instrumental_1 = Track()
-track_numb_encore_instrumental_1.id = 7878847
-track_numb_encore_instrumental_1.gid = '8f0abcc1-0ec0-3427-9e3e-925ee1e5b3e6'
-track_numb_encore_instrumental_1.position = 2
-track_numb_encore_instrumental_1.number = '2'
-track_numb_encore_instrumental_1.name = 'Numb/Encore (instrumental)'
-track_numb_encore_instrumental_1.length = 207453
-track_numb_encore_instrumental_1.is_data_track = False
-track_numb_encore_instrumental_1.artist_credit = artistcredit_jay_z_linkin_park
-track_numb_encore_instrumental_1.recording = recording_numb_encore_instrumental
-
-medium_2 = Medium()
-medium_2.id = 527716
-medium_2.position = 1
-medium_2.name = ''
-medium_2.track_count = 2
-medium_2.format = mediumformat_cd
-medium_2.tracks = [
- track_numb_encore_explicit_1,
- track_numb_encore_instrumental_1,
-]
-
-release_numb_encore_1 = Release()
-release_numb_encore_1.id = 527716
-release_numb_encore_1.gid = 'a64a0467-9d7a-4ffa-90b8-d87d9b41e311'
-release_numb_encore_1.name = 'Numb/Encore'
-release_numb_encore_1.barcode = '054391612328'
-release_numb_encore_1.comment = ''
-release_numb_encore_1.quality = -1
-release_numb_encore_1.artist_credit = artistcredit_jay_z_linkin_park
-release_numb_encore_1.mediums = [
- medium_2,
-]
-release_numb_encore_1.release_group = releasegroup_numb_encore
-release_numb_encore_1.script = script_latin
-release_numb_encore_1.status = releasestatus_official
-
-releasegroupmeta_1 = ReleaseGroupMeta()
-releasegroupmeta_1.release_count = 4
-releasegroupmeta_1.first_release_date_year = 2004
-
-releasegroup_collision_course = ReleaseGroup()
-releasegroup_collision_course.id = 1110052
-releasegroup_collision_course.gid = '8ef859e3-feb2-4dd1-93da-22b91280d768'
-releasegroup_collision_course.name = 'Collision Course'
-releasegroup_collision_course.meta = releasegroupmeta_1
-
-eventtype_festival = EventType()
-eventtype_festival.id = 2
-eventtype_festival.name = 'Festival'
-eventtype_festival.description = 'Festival description.'
-eventtype_festival.gid = 'b6ded574-b592-3f0e-b56e-5b5f06aa0678'
-
-taubertal_festival_2004 = Event()
-taubertal_festival_2004.id = 1607
-taubertal_festival_2004.gid = 'ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94'
-taubertal_festival_2004.name = 'Taubertal-Festival 2004, Day 1'
-taubertal_festival_2004.cancelled = False
-taubertal_festival_2004.ended = True
-taubertal_festival_2004.type = eventtype_festival
-
-eventtype_concert = EventType()
-eventtype_concert.id = 1
-eventtype_concert.name = 'Concert'
-eventtype_concert.description = 'Concert description.'
-eventtype_concert.gid = 'ef55e8d7-3d00-394a-8012-f5506a29ff0b'
-
-event_ra_hall_uk = Event()
-event_ra_hall_uk.id = 21675
-event_ra_hall_uk.gid = '40e6153d-a042-4c95-a0a9-b0a47e3825ce'
-event_ra_hall_uk.name = '1996-04-17: Royal Albert Hall, London, England, UK'
-event_ra_hall_uk.cancelled = False
-event_ra_hall_uk.ended = True
-event_ra_hall_uk.type = eventtype_concert
-
-release_collision_course = Release()
-release_collision_course.id = 28459
-release_collision_course.release_group = releasegroup_collision_course
-release_collision_course.gid = 'f51598f5-4ef9-4b8a-865d-06a077bf78cf'
-release_collision_course.name = 'Collision Course'
-release_collision_course.status = releasestatus_official
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/__init__.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/__init__.py
index 17b27776d..e69de29bb 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/tests/__init__.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/tests/__init__.py
@@ -1,10 +0,0 @@
-from brainzutils import cache
-from critiquebrainz.test_config import REDIS_HOST, REDIS_PORT, REDIS_NAMESPACE
-
-
-def setup_cache():
- cache.init(
- host=REDIS_HOST,
- port=REDIS_PORT,
- namespace=REDIS_NAMESPACE,
- )
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/artist_test.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/artist_test.py
deleted file mode 100644
index d72926e67..000000000
--- a/critiquebrainz/frontend/external/musicbrainz_db/tests/artist_test.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from unittest import TestCase
-from unittest.mock import MagicMock
-from critiquebrainz.frontend.external.musicbrainz_db.test_data import artist_linkin_park, artist_jay_z
-from critiquebrainz.frontend.external.musicbrainz_db import artist as mb_artist
-from critiquebrainz.frontend.external.musicbrainz_db.tests import setup_cache
-
-
-class ArtistTestCase(TestCase):
-
- def setUp(self):
- setup_cache()
- mb_artist.mb_session = MagicMock()
- self.mock_db = mb_artist.mb_session.return_value.__enter__.return_value
- self.artist_query = self.mock_db.query.return_value.options.return_value.filter.return_value.all
-
- def test_get_by_id(self):
- self.artist_query.return_value = [artist_linkin_park]
- artist = mb_artist.get_artist_by_id("f59c5520-5f46-4d2c-b2c4-822eabf53419")
- self.assertDictEqual(artist, {
- "id": "f59c5520-5f46-4d2c-b2c4-822eabf53419",
- "name": "Linkin Park",
- "sort_name": "Linkin Park",
- "type": "Group"
- })
-
- def test_fetch_multiple_artists(self):
- self.artist_query.return_value = [artist_jay_z, artist_linkin_park]
- artists = mb_artist.fetch_multiple_artists([
- "f59c5520-5f46-4d2c-b2c4-822eabf53419",
- "f82bcf78-5b69-4622-a5ef-73800768d9ac",
- ])
- self.assertDictEqual(artists["f82bcf78-5b69-4622-a5ef-73800768d9ac"], {
- "id": "f82bcf78-5b69-4622-a5ef-73800768d9ac",
- "name": "JAY Z",
- "sort_name": "JAY Z",
- "type": "Person",
- })
- self.assertDictEqual(artists["f59c5520-5f46-4d2c-b2c4-822eabf53419"], {
- "id": "f59c5520-5f46-4d2c-b2c4-822eabf53419",
- "name": "Linkin Park",
- "sort_name": "Linkin Park",
- "type": "Group",
- })
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/event_test.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/event_test.py
deleted file mode 100644
index 7e4e7b8c4..000000000
--- a/critiquebrainz/frontend/external/musicbrainz_db/tests/event_test.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from unittest import TestCase
-from unittest.mock import MagicMock
-from critiquebrainz.frontend.external.musicbrainz_db import event as mb_event
-from critiquebrainz.frontend.external.musicbrainz_db.test_data import taubertal_festival_2004, event_ra_hall_uk
-from critiquebrainz.frontend.external.musicbrainz_db.tests import setup_cache
-import critiquebrainz.frontend.external.musicbrainz_db.utils as mb_utils
-
-
-class EventTestCase(TestCase):
-
- def setUp(self):
- setup_cache()
- mb_event.mb_session = MagicMock()
- self.mock_db = mb_event.mb_session.return_value.__enter__.return_value
- self.event_query = self.mock_db.query.return_value.filter.return_value.all
-
- def test_get_event_by_id(self):
- self.event_query.return_value = [taubertal_festival_2004]
- event = mb_event.get_event_by_id('ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94')
- self.assertDictEqual(event, {
- 'id': 'ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94',
- 'name': 'Taubertal-Festival 2004, Day 1',
- })
-
- def test_fetch_multiple_events(self):
- self.event_query.return_value = [taubertal_festival_2004, event_ra_hall_uk]
- events = mb_event.fetch_multiple_events(
- ['ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94', '40e6153d-a042-4c95-a0a9-b0a47e3825ce'],
- )
- self.assertEqual(events['ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94']['name'],
- 'Taubertal-Festival 2004, Day 1')
- self.assertEqual(events['40e6153d-a042-4c95-a0a9-b0a47e3825ce']['name'],
- '1996-04-17: Royal Albert Hall, London, England, UK')
-
- def test_unknown_place(self):
- self.event_query.return_value = []
- mb_utils.reviewed_entities = MagicMock()
- mb_utils.reviewed_entities.return_value = ['40e6153d-a042-4c95-a0a9-b0a47e3825df']
- event = mb_event.get_event_by_id('40e6153d-a042-4c95-a0a9-b0a47e3825df')
- self.assertEqual(event['name'], '[Unknown Event]')
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/helpers_test.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/helpers_test.py
deleted file mode 100644
index 192ea51ca..000000000
--- a/critiquebrainz/frontend/external/musicbrainz_db/tests/helpers_test.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from collections import defaultdict
-from unittest import TestCase
-from unittest.mock import MagicMock
-from mbdata import models
-from critiquebrainz.frontend.external.musicbrainz_db.serialize import to_dict_relationships
-from critiquebrainz.frontend.external.musicbrainz_db.helpers import get_relationship_info
-import critiquebrainz.frontend.external.musicbrainz_db as mb
-from critiquebrainz.frontend.external.musicbrainz_db.test_data import linkplaceurl_1, linkplaceurl_2, place_suisto
-from critiquebrainz.frontend.external.musicbrainz_db.helpers import get_tags
-
-
-class HelpersTestCase(TestCase):
-
- def setUp(self):
- mb.mb_session = MagicMock()
- self.mock_db = mb.mb_session.return_value.__enter__.return_value
- self.tags_query = self.mock_db.query.return_value.join.return_value.\
- join.return_value.filter.return_value.group_by.return_value.all
- self.relationships_query = self.mock_db.query.return_value.options.return_value.\
- options.return_value.filter.return_value.options
-
- def test_get_tags(self):
- data = defaultdict(dict)
- self.tags_query.return_value = [(1820974, ['hip hop', 'hip-hop/rap'])]
- release_group_tags = get_tags(
- db=self.mock_db,
- entity_model=models.ReleaseGroup,
- tag_model=models.ReleaseGroupTag,
- foreign_tag_id=models.ReleaseGroupTag.release_group_id,
- entity_ids=['1820974'],
- )
- for release_group_id, tags in release_group_tags:
- data[release_group_id]['tags'] = tags
- expected_data = {
- 1820974: {
- 'tags': ['hip hop', 'hip-hop/rap']
- }
- }
- data = dict(data)
- self.assertDictEqual(data, expected_data)
-
- def test_get_relationship_info(self):
- data = {}
- self.relationships_query.return_value = [linkplaceurl_1, linkplaceurl_2]
- includes_data = defaultdict(dict)
- get_relationship_info(
- db=self.mock_db,
- target_type='url',
- source_type='place',
- source_entity_ids=['955'],
- includes_data=includes_data,
- )
- to_dict_relationships(data, place_suisto, includes_data[place_suisto.id]['relationship_objs'])
- expected_data = {
- 'url-rels': [
- {
- 'type': 'official homepage',
- 'type-id': '696b79da-7e45-40e6-a9d4-b31438eb7e5d',
- 'begin-year': None,
- 'end-year': None,
- 'direction': 'forward',
- 'url': {
- 'id': '7462ea62-7439-47f7-93bc-a425d1d989e8',
- 'url': 'http://www.suisto.fi/'
- }
- },
- {
- 'type': 'social network',
- 'type-id': '040de4d5-ace5-4cfb-8a45-95c5c73bce01',
- 'begin-year': None,
- 'end-year': None,
- 'direction': 'forward',
- 'url': {
- 'id': '8de22e00-c8e8-475f-814e-160ef761da63',
- 'url': 'https://twitter.com/Suisto'
- }
- }
- ]
- }
- self.assertDictEqual(data, expected_data)
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/place_test.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/place_test.py
deleted file mode 100644
index 96d3d4b3e..000000000
--- a/critiquebrainz/frontend/external/musicbrainz_db/tests/place_test.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from unittest import TestCase
-from unittest.mock import MagicMock
-from critiquebrainz.frontend.external.musicbrainz_db import place as mb_place
-from critiquebrainz.frontend.external.musicbrainz_db.test_data import place_suisto, place_verkatehdas
-from critiquebrainz.frontend.external.musicbrainz_db.tests import setup_cache
-import critiquebrainz.frontend.external.musicbrainz_db.utils as mb_utils
-
-
-class PlaceTestCase(TestCase):
-
- def setUp(self):
- setup_cache()
- mb_place.mb_session = MagicMock()
- self.mock_db = mb_place.mb_session.return_value.__enter__.return_value
- self.place_query = self.mock_db.query.return_value.options.return_value.options.return_value.filter.return_value.all
-
- def test_get_by_id(self):
- self.place_query.return_value = [place_suisto]
- place = mb_place.get_place_by_id('d71ffe38-5eaf-426b-9a2e-e1f21bc84609')
- self.assertEqual(place['name'], 'Suisto')
- self.assertEqual(place['type'], 'Venue')
- self.assertDictEqual(place['coordinates'], {
- 'latitude': 60.997758,
- 'longitude': 24.477142
- })
- self.assertDictEqual(place['area'], {
- 'id': '4479c385-74d8-4a2b-bdab-f48d1e6969ba',
- 'name': 'Hämeenlinna',
- })
-
- def test_fetch_multiple_places(self):
- self.place_query.return_value = [place_suisto, place_verkatehdas]
- places = mb_place.fetch_multiple_places(['f9587914-8505-4bd1-833b-16a3100a4948', 'd71ffe38-5eaf-426b-9a2e-e1f21bc84609'])
- self.assertEqual(places['d71ffe38-5eaf-426b-9a2e-e1f21bc84609']['name'], 'Suisto')
- self.assertEqual(places['f9587914-8505-4bd1-833b-16a3100a4948']['name'], 'Verkatehdas')
-
- def test_unknown_place(self):
- self.place_query.return_value = []
- mb_utils.reviewed_entities = MagicMock()
- mb_utils.reviewed_entities.return_value = ['d71ffe38-5eaf-426b-9a2e-e1f21bc846df']
- place = mb_place.get_place_by_id('d71ffe38-5eaf-426b-9a2e-e1f21bc846df')
- self.assertEqual(place['name'], '[Unknown Place]')
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/release_group_test.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/release_group_test.py
deleted file mode 100644
index 62f4082e6..000000000
--- a/critiquebrainz/frontend/external/musicbrainz_db/tests/release_group_test.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from unittest import TestCase
-from unittest.mock import MagicMock
-from critiquebrainz.frontend.external.musicbrainz_db import release_group as mb_release_group
-from critiquebrainz.frontend.external.musicbrainz_db.test_data import releasegroup_numb_encore, releasegroup_collision_course
-from critiquebrainz.frontend.external.musicbrainz_db.tests import setup_cache
-import critiquebrainz.frontend.external.musicbrainz_db.utils as mb_utils
-
-
-class ReleaseGroupTestCase(TestCase):
-
- def setUp(self):
- setup_cache()
- mb_release_group.mb_session = MagicMock()
- self.mock_db = mb_release_group.mb_session.return_value.__enter__.return_value
- # Mock sql query for fetching release groups and alter the return value to return SQLAlchemy objects.
- self._release_group_query = self.mock_db.query.return_value.options.return_value.options.return_value
- self._release_group_query_with_artists = self._release_group_query.options.return_value.options.return_value.\
- options.return_value
- self.release_group_query = self._release_group_query.filter.return_value.all
- self.release_group_query_with_artists = self._release_group_query_with_artists.filter.return_value.all
-
- def test_get_by_id(self):
- self.release_group_query_with_artists.return_value = [releasegroup_numb_encore]
- release_group = mb_release_group.get_release_group_by_id(
- '7c1014eb-454c-3867-8854-3c95d265f8de',
- )
-
- self.assertEqual(release_group['id'], '7c1014eb-454c-3867-8854-3c95d265f8de')
- self.assertEqual(release_group['title'], 'Numb/Encore')
- # Check if multiple artists are properly fetched
- self.assertEqual(release_group['artist-credit-phrase'], 'Jay-Z/Linkin Park')
- self.assertDictEqual(release_group['artist-credit'][0], {
- 'name': 'Jay-Z',
- 'artist': {
- 'id': 'f82bcf78-5b69-4622-a5ef-73800768d9ac',
- 'name': 'JAY Z',
- 'sort_name': 'JAY Z'
- },
- 'join_phrase': '/',
- })
- self.assertDictEqual(release_group['artist-credit'][1], {
- 'name': 'Linkin Park',
- 'artist': {
- 'id': 'f59c5520-5f46-4d2c-b2c4-822eabf53419',
- 'name': 'Linkin Park',
- 'sort_name': 'Linkin Park',
- },
- })
-
- def test_fetch_release_groups(self):
- self.release_group_query.return_value = [releasegroup_numb_encore, releasegroup_collision_course]
- release_groups = mb_release_group.fetch_multiple_release_groups(
- mbids=['8ef859e3-feb2-4dd1-93da-22b91280d768', '7c1014eb-454c-3867-8854-3c95d265f8de'],
- )
- self.assertEqual(len(release_groups), 2)
- self.assertEqual(release_groups['7c1014eb-454c-3867-8854-3c95d265f8de']['title'], 'Numb/Encore')
- self.assertEqual(release_groups['8ef859e3-feb2-4dd1-93da-22b91280d768']['title'], 'Collision Course')
-
- def test_fetch_browse_release_groups(self):
- mb_release_group._browse_release_groups_query = MagicMock()
- mock_query = mb_release_group._browse_release_groups_query.return_value
- mock_query.count.return_value = 2
- mock_query.order_by.return_value.limit.return_value.offset.\
- return_value.all.return_value = [releasegroup_collision_course, releasegroup_numb_encore]
- release_groups = mb_release_group.browse_release_groups(
- artist_id='f59c5520-5f46-4d2c-b2c4-822eabf53419',
- release_types=['Single', 'EP'],
- )
- self.assertListEqual(release_groups[0], [
- {
- 'id': '8ef859e3-feb2-4dd1-93da-22b91280d768',
- 'title': 'Collision Course',
- 'first-release-year': 2004,
- },
- {
- 'id': '7c1014eb-454c-3867-8854-3c95d265f8de',
- 'title': 'Numb/Encore',
- 'first-release-year': 2004,
- }
- ])
- self.assertEqual(release_groups[1], 2)
-
- def test_unknown_release_group(self):
- self.release_group_query.return_value = []
- mb_utils.reviewed_entities = MagicMock()
- mb_utils.reviewed_entities.return_value = ['8ef859e3-feb2-4dd1-93da-22b91280d7df']
- release_group = mb_release_group.get_release_group_by_id('8ef859e3-feb2-4dd1-93da-22b91280d7df')
- self.assertEqual(release_group['title'], '[Unknown Release Group]')
- self.assertListEqual(release_group['artist-credit'], [{
- 'name': '[unknown]',
- 'artist': {
- 'id': '125ec42a-7229-4250-afc5-e057484327fe',
- 'name': '[unknown]',
- 'sort_name': '[unknown]'
- }
- }])
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/release_test.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/release_test.py
deleted file mode 100644
index ebf46d7c4..000000000
--- a/critiquebrainz/frontend/external/musicbrainz_db/tests/release_test.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from unittest import TestCase
-from unittest.mock import MagicMock
-from critiquebrainz.frontend.external.musicbrainz_db.tests import setup_cache
-from critiquebrainz.frontend.external.musicbrainz_db.test_data import (
- release_numb_encore,
- release_collision_course,
- release_numb_encore_1,
-)
-from critiquebrainz.frontend.external.musicbrainz_db import release as mb_release
-
-
-class ReleaseTestCase(TestCase):
-
- def setUp(self):
- setup_cache()
- mb_release.mb_session = MagicMock()
- self.mock_db = mb_release.mb_session.return_value.__enter__.return_value
- self.release_query = self.mock_db.query.return_value.options.return_value.options.return_value.\
- options.return_value.options.return_value.options.return_value.filter.return_value.all
-
- def test_get_by_id(self):
- self.release_query.return_value = [release_numb_encore]
- release = mb_release.get_release_by_id('16bee711-d7ce-48b0-adf4-51f124bcc0df')
- self.assertEqual(release["name"], "Numb/Encore")
- self.assertEqual(len(release["medium-list"][0]["track-list"]), 2)
- self.assertDictEqual(release["medium-list"][0]["track-list"][0], {
- "id": "dfe024b2-95b2-453f-b03e-3b9fa06f44e6",
- "name": "Numb/Encore (explicit)",
- "number": "1",
- "position": 1,
- "length": 207000,
- "recording_id": "daccb724-8023-432a-854c-e0accb6c8678",
- "recording_title": "Numb/Encore (explicit)"
- })
-
- def test_fetch_multiple_releases(self):
- self.mock_db.query.return_value.filter.return_value.all.return_value = [release_numb_encore_1, release_collision_course]
- releases = mb_release.fetch_multiple_releases(
- mbids=['f51598f5-4ef9-4b8a-865d-06a077bf78cf', 'a64a0467-9d7a-4ffa-90b8-d87d9b41e311'],
- )
- self.assertEqual(len(releases), 2)
- self.assertEqual(releases['a64a0467-9d7a-4ffa-90b8-d87d9b41e311']['name'], 'Numb/Encore')
- self.assertEqual(releases['f51598f5-4ef9-4b8a-865d-06a077bf78cf']['name'], 'Collision Course')
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/tests/test_cache.py b/critiquebrainz/frontend/external/musicbrainz_db/tests/test_cache.py
new file mode 100644
index 000000000..f93cbeeed
--- /dev/null
+++ b/critiquebrainz/frontend/external/musicbrainz_db/tests/test_cache.py
@@ -0,0 +1,264 @@
+from unittest import mock
+
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
+from critiquebrainz.frontend.external.musicbrainz_db.artist import get_artist_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.event import get_event_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.label import get_label_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.place import get_place_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.release import get_release_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.release_group import get_release_group_by_id
+from critiquebrainz.frontend.external.musicbrainz_db.work import get_work_by_id
+from critiquebrainz.data.testing import DataTestCase
+
+
+class CacheTestCase(DataTestCase):
+
+ def setUp(self):
+ super(CacheTestCase, self).setUp()
+
+ @mock.patch('brainzutils.cache.get')
+ @mock.patch('brainzutils.cache.set')
+ @mock.patch('brainzutils.musicbrainz_db.artist.fetch_multiple_artists')
+ def test_artist_cache(self, artist_fetch, cache_set, cache_get):
+ mbid = "f59c5520-5f46-4d2c-b2c4-822eabf53419"
+ expected_key = b"artist_f59c5520-5f46-4d2c-b2c4-822eabf53419"
+ artist = {
+ "id": "f59c5520-5f46-4d2c-b2c4-822eabf53419",
+ "name": "Linkin Park",
+ "sort_name": "Linkin Park",
+ "type": "Group"
+ }
+ artist_fetch.return_value = {mbid: artist}
+
+ cache_get.return_value = None
+ get_artist_by_id(mbid)
+
+ # Test that first time data is fetched database is queried
+ cache_get.assert_called_with(expected_key)
+ artist_fetch.assert_called_with([mbid], includes=['artist-rels', 'url-rels'])
+ cache_set.assert_called_with(key=expected_key, val=artist, time=DEFAULT_CACHE_EXPIRATION)
+
+ cache_get.return_value = artist
+ cache_set.reset_mock()
+ artist_fetch.reset_mock()
+ get_artist_by_id(mbid)
+
+ # Test that second time data is fetched from cache
+ cache_get.assert_called_with(expected_key)
+ artist_fetch.assert_not_called()
+ cache_set.assert_not_called()
+
+ @mock.patch('brainzutils.cache.get')
+ @mock.patch('brainzutils.cache.set')
+ @mock.patch('brainzutils.musicbrainz_db.event.fetch_multiple_events')
+ def test_event_cache(self, event_fetch, cache_set, cache_get):
+ mbid = "ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94"
+ expected_key = b"event_ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94"
+ event = {
+ 'id': 'ebe6ce0f-22c0-4fe7-bfd4-7a0397c9fe94',
+ 'name': 'Taubertal-Festival 2004, Day 1',
+ }
+ event_fetch.return_value = {mbid: event}
+
+ cache_get.return_value = None
+ get_event_by_id(mbid)
+
+ # Test that first time data is fetched database is queried
+ cache_get.assert_called_with(expected_key)
+ event_fetch.assert_called_with([mbid], includes=['artist-rels', 'place-rels',
+ 'series-rels', 'url-rels', 'release-group-rels'])
+ cache_set.assert_called_with(key=expected_key, val=event, time=DEFAULT_CACHE_EXPIRATION)
+
+ cache_get.return_value = event
+ cache_set.reset_mock()
+ event_fetch.reset_mock()
+ get_event_by_id(mbid)
+
+ # Test that second time data is fetched from cache
+ cache_get.assert_called_with(expected_key)
+ event_fetch.assert_not_called()
+ cache_set.assert_not_called()
+
+ @mock.patch('brainzutils.cache.get')
+ @mock.patch('brainzutils.cache.set')
+ @mock.patch('brainzutils.musicbrainz_db.label.fetch_multiple_labels')
+ def test_label_cache(self, label_fetch, cache_set, cache_get):
+ mbid = "1aed8c3b-8e1e-46f8-b558-06357ff5f298"
+ expected_key = b"label_1aed8c3b-8e1e-46f8-b558-06357ff5f298"
+ label = {
+ "id": "1aed8c3b-8e1e-46f8-b558-06357ff5f298",
+ "name": "Dreamville",
+ "type": "Imprint",
+ "area": "United States",
+ }
+ label_fetch.return_value = {mbid: label}
+
+ cache_get.return_value = None
+ get_label_by_id(mbid)
+
+ # Test that first time data is fetched database is queried
+ cache_get.assert_called_with(expected_key)
+ label_fetch.assert_called_with([mbid], includes=['artist-rels', 'url-rels'])
+ cache_set.assert_called_with(key=expected_key, val=label, time=DEFAULT_CACHE_EXPIRATION)
+
+ cache_get.return_value = label
+ cache_set.reset_mock()
+ label_fetch.reset_mock()
+ get_label_by_id(mbid)
+
+ # Test that second time data is fetched from cache
+ cache_get.assert_called_with(expected_key)
+ label_fetch.assert_not_called()
+ cache_set.assert_not_called()
+
+ @mock.patch('brainzutils.cache.get')
+ @mock.patch('brainzutils.cache.set')
+ @mock.patch('brainzutils.musicbrainz_db.place.fetch_multiple_places')
+ def test_place_cache(self, place_fetch, cache_set, cache_get):
+ mbid = "d71ffe38-5eaf-426b-9a2e-e1f21bc84609"
+ expected_key = b"place_d71ffe38-5eaf-426b-9a2e-e1f21bc84609"
+ place = {
+ "id": "d71ffe38-5eaf-426b-9a2e-e1f21bc84609",
+ "name": "Suisto",
+ "coordinates": {
+ 'latitude': 60.997758,
+ 'longitude': 24.477142
+ },
+ "area": {
+ "id": "4479c385-74d8-4a2b-bdab-f48d1e6969ba",
+ "name": "Hämeenlinna",
+ }
+ }
+ place_fetch.return_value = {mbid: place}
+
+ cache_get.return_value = None
+ get_place_by_id(mbid)
+
+ # Test that first time data is fetched database is queried
+ cache_get.assert_called_with(expected_key)
+ place_fetch.assert_called_with([mbid], includes=['artist-rels', 'place-rels',
+ 'release-group-rels', 'url-rels'])
+ cache_set.assert_called_with(key=expected_key, val=place, time=DEFAULT_CACHE_EXPIRATION)
+
+ cache_get.return_value = place
+ cache_set.reset_mock()
+ place_fetch.reset_mock()
+ get_place_by_id(mbid)
+
+ # Test that second time data is fetched from cache
+ cache_get.assert_called_with(expected_key)
+ place_fetch.assert_not_called()
+ cache_set.assert_not_called()
+
+ @mock.patch('brainzutils.cache.get')
+ @mock.patch('brainzutils.cache.set')
+ @mock.patch('brainzutils.musicbrainz_db.release.fetch_multiple_releases')
+ def test_release_cache(self, release_fetch, cache_set, cache_get):
+ mbid = "16bee711-d7ce-48b0-adf4-51f124bcc0df"
+ expected_key = b"release_16bee711-d7ce-48b0-adf4-51f124bcc0df"
+ release = {
+ "id": "16bee711-d7ce-48b0-adf4-51f124bcc0df",
+ "name": "Numb/Encore",
+ "medium-list": [{
+ "track_list": [{
+ "id": "dfe024b2-95b2-453f-b03e-3b9fa06f44e6",
+ "name": "Numb/Encore (explicit)",
+ "number": "1",
+ "position": 1,
+ "length": 207000,
+ "recording_id": "daccb724-8023-432a-854c-e0accb6c8678",
+ "recording_title": "Numb/Encore (explicit)"
+ }]
+ }]
+ }
+ release_fetch.return_value = {mbid: release}
+
+ cache_get.return_value = None
+ get_release_by_id(mbid)
+
+ # Test that first time data is fetched database is queried
+ cache_get.assert_called_with(expected_key)
+ release_fetch.assert_called_with([mbid], includes=['media', 'release-groups'])
+ cache_set.assert_called_with(key=expected_key, val=release, time=DEFAULT_CACHE_EXPIRATION)
+
+ cache_get.return_value = release
+ cache_set.reset_mock()
+ release_fetch.reset_mock()
+ get_release_by_id(mbid)
+
+ # Test that second time data is fetched from cache
+ cache_get.assert_called_with(expected_key)
+ release_fetch.assert_not_called()
+ cache_set.assert_not_called()
+
+ @mock.patch('brainzutils.cache.get')
+ @mock.patch('brainzutils.cache.set')
+ @mock.patch('brainzutils.musicbrainz_db.release_group.fetch_multiple_release_groups')
+ def test_release_group_cache(self, release_group_fetch, cache_set, cache_get):
+ mbid = "7c1014eb-454c-3867-8854-3c95d265f8de"
+ expected_key = b"release-group_7c1014eb-454c-3867-8854-3c95d265f8de"
+ release_group = {
+ 'id': '7c1014eb-454c-3867-8854-3c95d265f8de',
+ 'title': 'Numb/Encore',
+ 'artist-credit-phrase': 'Jay-Z/Linkin Park',
+ 'artist-credit': [{
+ 'name': 'Jay-Z',
+ 'artist': {
+ 'id': 'f82bcf78-5b69-4622-a5ef-73800768d9ac',
+ 'name': 'JAY Z',
+ 'sort_name': 'JAY Z'
+ },
+ 'join_phrase': '/',
+ }]
+ }
+ release_group_fetch.return_value = {mbid: release_group}
+
+ cache_get.return_value = None
+ get_release_group_by_id(mbid)
+
+ # Test that first time data is fetched database is queried
+ cache_get.assert_called_with(expected_key)
+ release_group_fetch.assert_called_with([mbid], includes=['artists', 'releases',
+ 'release-group-rels', 'url-rels', 'tags'])
+ cache_set.assert_called_with(key=expected_key, val=release_group, time=DEFAULT_CACHE_EXPIRATION)
+
+ cache_get.return_value = release_group
+ cache_set.reset_mock()
+ release_group_fetch.reset_mock()
+ get_release_group_by_id(mbid)
+
+ # Test that second time data is fetched from cache
+ cache_get.assert_called_with(expected_key)
+ release_group_fetch.assert_not_called()
+ cache_set.assert_not_called()
+
+ @mock.patch('brainzutils.cache.get')
+ @mock.patch('brainzutils.cache.set')
+ @mock.patch('brainzutils.musicbrainz_db.work.fetch_multiple_works')
+ def test_work_cache(self, work_fetch, cache_set, cache_get):
+ mbid = "54ce5e07-2aca-4578-83d8-5a41a7b2f434"
+ expected_key = b"work_54ce5e07-2aca-4578-83d8-5a41a7b2f434"
+ work = {
+ "id": "54ce5e07-2aca-4578-83d8-5a41a7b2f434",
+ "name": "a lot",
+ "type": "Song",
+ }
+ work_fetch.return_value = {mbid: work}
+
+ cache_get.return_value = None
+ get_work_by_id(mbid)
+
+ # Test that first time data is fetched database is queried
+ cache_get.assert_called_with(expected_key)
+ work_fetch.assert_called_with([mbid], includes=['artist-rels', 'recording-rels'])
+ cache_set.assert_called_with(key=expected_key, val=work, time=DEFAULT_CACHE_EXPIRATION)
+
+ cache_get.return_value = work
+ cache_set.reset_mock()
+ work_fetch.reset_mock()
+ get_work_by_id(mbid)
+
+ # Test that second time data is fetched from cache
+ cache_get.assert_called_with(expected_key)
+ work_fetch.assert_not_called()
+ cache_set.assert_not_called()
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/utils.py b/critiquebrainz/frontend/external/musicbrainz_db/utils.py
index 3bc698209..7fdfbd1fe 100644
--- a/critiquebrainz/frontend/external/musicbrainz_db/utils.py
+++ b/critiquebrainz/frontend/external/musicbrainz_db/utils.py
@@ -1,8 +1,8 @@
from mbdata import models
+
import critiquebrainz.frontend.external.musicbrainz_db.exceptions as mb_exceptions
-from critiquebrainz.frontend.external.musicbrainz_db import special_entities
from critiquebrainz.db.review import reviewed_entities, ENTITY_TYPES as CB_ENTITIES
-
+from critiquebrainz.frontend.external.musicbrainz_db import special_entities
# Entity models
ENTITY_MODELS = {
@@ -15,7 +15,6 @@
'url': models.URL,
}
-
# Redirect models
REDIRECT_MODELS = {
'place': models.PlaceGIDRedirect,
@@ -84,5 +83,6 @@ def get_entities_by_gids(*, query, entity_type, mbids):
remaining_gids = list(set(remaining_gids) - set(reviewed_gids))
if remaining_gids:
- raise mb_exceptions.NoDataFoundException("Couldn't find entities with IDs: {mbids}".format(mbids=remaining_gids))
+ raise mb_exceptions.NoDataFoundException(
+ "Couldn't find entities with IDs: {mbids}".format(mbids=remaining_gids))
return entities
diff --git a/critiquebrainz/frontend/external/musicbrainz_db/work.py b/critiquebrainz/frontend/external/musicbrainz_db/work.py
new file mode 100644
index 000000000..bc9238a88
--- /dev/null
+++ b/critiquebrainz/frontend/external/musicbrainz_db/work.py
@@ -0,0 +1,23 @@
+from brainzutils import cache
+from brainzutils.musicbrainz_db import work as db
+
+from critiquebrainz.frontend.external.musicbrainz_db import DEFAULT_CACHE_EXPIRATION
+
+
+def get_work_by_id(mbid):
+ """Get work with MusicBrainz ID.
+
+ Args:
+ mbid (uuid): MBID(gid) of the work.
+ Returns:
+ Dictionary containing the work information
+ """
+ key = cache.gen_key('work', mbid)
+ work = cache.get(key)
+ if not work:
+ work = db.fetch_multiple_works(
+ [mbid],
+ includes=['artist-rels', 'recording-rels'],
+ ).get(mbid)
+ cache.set(key=key, val=work, time=DEFAULT_CACHE_EXPIRATION)
+ return work
diff --git a/critiquebrainz/frontend/external/relationships/artist.py b/critiquebrainz/frontend/external/relationships/artist.py
index 009e19141..d6cd4cd98 100644
--- a/critiquebrainz/frontend/external/relationships/artist.py
+++ b/critiquebrainz/frontend/external/relationships/artist.py
@@ -2,6 +2,7 @@
Relationship processor for artist entity.
"""
import urllib.parse
+
from flask_babel import lazy_gettext
diff --git a/critiquebrainz/frontend/external/relationships/label.py b/critiquebrainz/frontend/external/relationships/label.py
new file mode 100644
index 000000000..200119872
--- /dev/null
+++ b/critiquebrainz/frontend/external/relationships/label.py
@@ -0,0 +1,77 @@
+"""
+Relationship processor for label entity.
+"""
+import urllib.parse
+
+from flask_babel import lazy_gettext
+
+
+def process(label):
+ """Handles processing supported relation lists."""
+ if 'url-rels' in label and label['url-rels']:
+ label['external-urls'] = _url(label['url-rels'])
+ return label
+
+
+def _url(url_list):
+ """Processor for Label-URL relationship."""
+ basic_types = {
+ 'wikidata': {'name': lazy_gettext('Wikidata'), 'icon': 'wikidata-16.png', },
+ 'discogs': {'name': lazy_gettext('Discogs'), 'icon': 'discogs-16.png', },
+ 'allmusic': {'name': lazy_gettext('Allmusic'), 'icon': 'allmusic-16.png', },
+ 'bandcamp': {'name': lazy_gettext('Bandcamp'), 'icon': 'bandcamp-16.png', },
+ 'official homepage': {'name': lazy_gettext('Official homepage'), 'icon': 'home-16.png', },
+ 'BBC Music page': {'name': lazy_gettext('BBC Music'), },
+ }
+ external_urls = []
+ for relation in url_list:
+ if relation['type'] in basic_types:
+ external_urls.append(dict(list(relation.items()) + list(basic_types[relation['type']].items())))
+ else:
+ try:
+ target = urllib.parse.urlparse(relation['target'])
+ if relation['type'] == 'lyrics':
+                external_urls.append(dict(
+                    list(relation.items()) + list({
+                        'name': lazy_gettext('Lyrics'),
+                        'disambiguation': target.netloc,
+                    }.items())))
+ elif relation['type'] == 'wikipedia':
+                external_urls.append(dict(
+                    list(relation.items()) + list({
+                        'name': lazy_gettext('Wikipedia'),
+                        'disambiguation': (
+                            target.netloc.split('.')[0] +
+                            ':' +
+                            urllib.parse.unquote(target.path.split('/')[2]).replace("_", " ")
+                        ),
+                        'icon': 'wikipedia-16.png',
+                    }.items())))
+ elif relation['type'] == 'youtube':
+ path = target.path.split('/')
+ if path[1] == 'user' or path[1] == 'channel':
+ disambiguation = path[2]
+ else:
+ disambiguation = path[1]
+                external_urls.append(dict(
+                    list(relation.items()) + list({
+                        'name': lazy_gettext('YouTube'),
+                        'disambiguation': disambiguation,
+                        'icon': 'youtube-16.png',
+                    }.items())))
+ elif relation['type'] == 'social network':
+ if target.netloc == 'twitter.com':
+                    external_urls.append(dict(
+                        list(relation.items()) + list({
+                            'name': lazy_gettext('Twitter'),
+                            'disambiguation': target.path.split('/')[1],
+                            'icon': 'twitter-16.png',
+                        }.items())))
+ else:
+ # TODO(roman): Process other types here
+ pass
+ except Exception: # FIXME(roman): Too broad exception clause.
+ # TODO(roman): Log error.
+ pass
+
+ return sorted(external_urls, key=lambda k: k['name'])
diff --git a/critiquebrainz/frontend/external/relationships/release_group.py b/critiquebrainz/frontend/external/relationships/release_group.py
index dd2f3ccda..ff84bb8c0 100644
--- a/critiquebrainz/frontend/external/relationships/release_group.py
+++ b/critiquebrainz/frontend/external/relationships/release_group.py
@@ -2,6 +2,7 @@
Relationship processor for release group entity.
"""
import urllib.parse
+
from flask_babel import lazy_gettext
diff --git a/critiquebrainz/frontend/external/soundcloud.py b/critiquebrainz/frontend/external/soundcloud.py
index 56f6e3dfb..3c242faa6 100644
--- a/critiquebrainz/frontend/external/soundcloud.py
+++ b/critiquebrainz/frontend/external/soundcloud.py
@@ -1,5 +1,6 @@
import re
-import critiquebrainz.frontend.external.musicbrainz_db.release as mb_release
+
+import brainzutils.musicbrainz_db.release as mb_release
def get_url(mbid):
diff --git a/critiquebrainz/frontend/external/spotify.py b/critiquebrainz/frontend/external/spotify.py
index 0dd74e721..5563634c5 100644
--- a/critiquebrainz/frontend/external/spotify.py
+++ b/critiquebrainz/frontend/external/spotify.py
@@ -3,14 +3,16 @@
More information about it is available at https://developer.spotify.com/web-api/.
"""
-from base64 import b64encode
-from typing import List
-from http import HTTPStatus
import logging
import urllib.parse
+from base64 import b64encode
+from http import HTTPStatus
+from typing import List
+
import requests
from brainzutils import cache
from flask import current_app as app
+
from critiquebrainz.frontend.external.exceptions import ExternalServiceException
_DEFAULT_CACHE_EXPIRATION = 12 * 60 * 60 # seconds (12 hours)
diff --git a/critiquebrainz/frontend/external/spotify_test.py b/critiquebrainz/frontend/external/spotify_test.py
index 6099a9367..f10d3b539 100644
--- a/critiquebrainz/frontend/external/spotify_test.py
+++ b/critiquebrainz/frontend/external/spotify_test.py
@@ -1,9 +1,11 @@
-from unittest.mock import MagicMock
from http import HTTPStatus
+from unittest.mock import MagicMock
+
import requests
from brainzutils import cache
-from critiquebrainz.frontend.testing import FrontendTestCase
+
from critiquebrainz.frontend.external import spotify
+from critiquebrainz.frontend.testing import FrontendTestCase
class MockResponse:
diff --git a/critiquebrainz/frontend/forms/review.py b/critiquebrainz/frontend/forms/review.py
index 4d0a6b1f1..2d651f6d8 100644
--- a/critiquebrainz/frontend/forms/review.py
+++ b/critiquebrainz/frontend/forms/review.py
@@ -41,7 +41,7 @@ class ReviewEditForm(FlaskForm):
('CC BY-SA 3.0', lazy_gettext('Allow commercial use of this review(CC BY-SA 3.0 license)')), # noqa: E501
('CC BY-NC-SA 3.0', lazy_gettext('Do not allow commercial use of this review, unless approved by MetaBrainz Foundation (CC BY-NC-SA 3.0 license)')), # noqa: E501
],
- validators=[validators.DataRequired(message=lazy_gettext("You need to choose a license!"))])
+ validators=[validators.InputRequired(message=lazy_gettext("You need to choose a license"))])
remember_license = BooleanField(lazy_gettext("Remember this license choice for further preference"))
language = SelectField(lazy_gettext("You need to accept the license agreement!"), choices=languages)
rating = IntegerField(lazy_gettext("Rating"), widget=Input(input_type='number'), validators=[validators.Optional()])
diff --git a/critiquebrainz/frontend/login/__init__.py b/critiquebrainz/frontend/login/__init__.py
index 6fe258a22..956d4b7a7 100644
--- a/critiquebrainz/frontend/login/__init__.py
+++ b/critiquebrainz/frontend/login/__init__.py
@@ -4,12 +4,14 @@
It is based on OAuth2 protocol. MusicBrainz is the only supported provider.
"""
from functools import wraps
+
from flask import redirect, url_for
-from flask_login import LoginManager, current_user
from flask_babel import lazy_gettext, gettext
+from flask_login import LoginManager, current_user
from werkzeug.exceptions import Unauthorized
-from critiquebrainz.data.mixins import AnonymousUser
+
import critiquebrainz.db.users as db_users
+from critiquebrainz.data.mixins import AnonymousUser
from critiquebrainz.db.user import User
mb_auth = None
diff --git a/critiquebrainz/frontend/login/provider.py b/critiquebrainz/frontend/login/provider.py
index 41af1f0b8..870c09fba 100644
--- a/critiquebrainz/frontend/login/provider.py
+++ b/critiquebrainz/frontend/login/provider.py
@@ -1,9 +1,10 @@
import json
-from rauth import OAuth2Service
+
from flask import request, session, url_for
+from rauth import OAuth2Service
-from critiquebrainz.db.user import User
import critiquebrainz.db.users as db_users
+from critiquebrainz.db.user import User
from critiquebrainz.utils import generate_string
diff --git a/critiquebrainz/frontend/static/gulpfile.js b/critiquebrainz/frontend/static/gulpfile.js
deleted file mode 100644
index 4b4c14ac5..000000000
--- a/critiquebrainz/frontend/static/gulpfile.js
+++ /dev/null
@@ -1,121 +0,0 @@
-let _ = require('lodash');
-let fs = require('fs');
-let gulp = require('gulp');
-let less = require('gulp-less');
-let path = require('path');
-let rev = require('gulp-rev');
-let source = require('vinyl-source-stream');
-let streamify = require('gulp-streamify');
-let through2 = require('through2');
-let Q = require('q');
-let yarb = require('yarb');
-
-const CACHED_BUNDLES = new Map();
-const STATIC_DIR = path.resolve(__dirname, '../static');
-const BUILD_DIR = path.resolve(STATIC_DIR, 'build');
-const STYLES_DIR = path.resolve(STATIC_DIR, 'styles');
-const SCRIPTS_DIR = path.resolve(STATIC_DIR, 'scripts');
-
-const revManifestPath = path.resolve(BUILD_DIR, 'rev-manifest.json');
-const revManifest = {};
-
-if (fs.existsSync(revManifestPath)) {
- _.assign(revManifest, JSON.parse(fs.readFileSync(revManifestPath)));
-}
-
-function writeManifest() {
- fs.writeFileSync(revManifestPath, JSON.stringify(revManifest));
-}
-
-function writeResource(stream) {
- let deferred = Q.defer();
-
- stream
- .pipe(streamify(rev()))
- .pipe(gulp.dest(BUILD_DIR))
- .pipe(rev.manifest())
- .pipe(through2.obj(function (chunk, encoding, callback) {
- _.assign(revManifest, JSON.parse(chunk.contents));
- callback();
- }))
- .on('finish', function () {
- deferred.resolve();
- });
-
- return deferred.promise;
-}
-
-function buildStyles() {
- return writeResource(
- gulp.src(path.resolve(STYLES_DIR, '*.less'))
- .pipe(less({
- rootpath: '/static/',
- relativeUrls: true,
- plugins: [
- new (require('less-plugin-clean-css'))({compatibility: 'ie8'})
- ]
- }))
- ).done(writeManifest);
-}
-
-function transformBundle(bundle) {
- bundle.transform('babelify');
- bundle.transform('envify', {global: true});
- return bundle;
-}
-
-function runYarb(resourceName, callback) {
- if (resourceName in CACHED_BUNDLES) {
- return CACHED_BUNDLES.get(resourceName);
- }
-
- let bundle = transformBundle(yarb(path.resolve(SCRIPTS_DIR, resourceName), {
- debug: false // disable sourcemaps
- }));
-
- if (callback) {
- callback(bundle);
- }
-
- CACHED_BUNDLES.set(resourceName, bundle);
- return bundle;
-}
-
-function bundleScripts(b, resourceName) {
- return b.bundle().on('error', console.log).pipe(source(resourceName));
-}
-
-function writeScript(b, resourceName) {
- return writeResource(bundleScripts(b, resourceName));
-}
-
-function buildScripts() {
- process.env.NODE_ENV = String(process.env.DEVELOPMENT_SERVER) === '1' ? 'development' : 'production';
-
- let commonBundle = runYarb('common.js');
- let leafletBundle = runYarb('leaflet.js', function (b) {
- b.external(commonBundle);
- });
- let spotifyBundle = runYarb('spotify.js', function (b) {
- b.external(commonBundle);
- });
- let ratingBundle = runYarb('rating.js', function (b) {
- b.external(commonBundle);
- });
- let wysiwygBundle = runYarb('wysiwyg-editor.js', function(b) {
- b.external(commonBundle)
- });
-
- return Q.all([
- writeScript(commonBundle, 'common.js'),
- writeScript(leafletBundle, 'leaflet.js'),
- writeScript(spotifyBundle, 'spotify.js'),
- writeScript(ratingBundle, 'rating.js'),
- writeScript(wysiwygBundle, 'wysiwyg-editor.js'),
- ]).then(writeManifest);
-}
-
-gulp.task('styles', buildStyles);
-gulp.task('scripts', buildScripts);
-
-gulp.task('default', ['styles', 'scripts']);
diff --git a/critiquebrainz/frontend/static/scripts/leaflet.js b/critiquebrainz/frontend/static/scripts/leaflet.js
index 2fa56e867..5ad553c1f 100644
--- a/critiquebrainz/frontend/static/scripts/leaflet.js
+++ b/critiquebrainz/frontend/static/scripts/leaflet.js
@@ -1,2 +1,3 @@
+import '../styles/leaflet.less';
var L = require('leaflet');
-L.Icon.Default.imagePath = '/static/images';
+L.Icon.Default.imagePath = '/static/images/';
diff --git a/critiquebrainz/frontend/static/scripts/main.js b/critiquebrainz/frontend/static/scripts/main.js
new file mode 100644
index 000000000..28692e7af
--- /dev/null
+++ b/critiquebrainz/frontend/static/scripts/main.js
@@ -0,0 +1 @@
+import '../styles/main.less';
diff --git a/critiquebrainz/frontend/static/styles/main.less b/critiquebrainz/frontend/static/styles/main.less
index a82bd3df2..9c5477637 100644
--- a/critiquebrainz/frontend/static/styles/main.less
+++ b/critiquebrainz/frontend/static/styles/main.less
@@ -10,8 +10,11 @@
// Entity colors
@rg-color: @blue;
+@artist-color: @blue;
@event-color: @green;
@place-color: @yellow;
+@work-color: @blue;
+@label-color: @blue;
body {
padding-bottom: 25px;
@@ -52,6 +55,8 @@ body {
.footer {
font-size: 13px;
line-height: 20px;
+ border-top: 2px solid #36b5b2;
+ padding-top: 20px;
.title {
font-weight: bold;
margin-bottom: 6px;
@@ -122,11 +127,23 @@ ul.sharing {
&.release-group {
background-color: fade(@rg-color, 70%);
}
+ &.place {
+ background-color: fade(@place-color, 70%);
+ }
&.event {
background-color: fade(@event-color, 70%);
}
- &.place {
- background-color: fade(@place-color, 70%);
+ &.artist {
+ background-color: fade(@artist-color, 70%);
+ }
+ &.work {
+ background-color: fade(@work-color, 70%);
+ }
+ &.label {
+ background-color: fade(@label-color, 70%);
+ }
+ &.event {
+ background-color: fade(@event-color, 70%);
}
}
@@ -150,9 +167,9 @@ ul.sharing {
margin-bottom: 20px;
}
#stats {
- margin-top: 10px;
+ margin-top: 30px;
@media (min-width: @grid-float-breakpoint) {
- margin-top: 60px;
+ margin-top: 10px;
margin-bottom: 0px;
}
}
@@ -262,6 +279,7 @@ ul.sharing {
}
img {
+ z-index: 0;
display: block;
margin-left: auto;
margin-right: auto;
@@ -277,10 +295,12 @@ ul.sharing {
font-weight: bold;
font-size: small;
color: #fff;
+ z-index: 2;
}
.caption {
.release-group-title {
+ z-index: 2;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@@ -418,6 +438,11 @@ a#edit-review { margin-top: 20px; }
}
}
}
+
+ // comments
+ .comment-header {
+ margin: 15px auto;
+ }
}
@@ -476,11 +501,23 @@ a#edit-review { margin-top: 20px; }
&.release-group {
background-color: fade(@rg-color, 70%);
}
+ &.place {
+ background-color: fade(@place-color, 70%);
+ }
&.event {
background-color: fade(@event-color, 70%);
}
- &.place {
- background-color: fade(@place-color, 70%);
+ &.artist {
+ background-color: fade(@artist-color, 70%);
+ }
+ &.work {
+ background-color: fade(@work-color, 70%);
+ }
+ &.label {
+ background-color: fade(@label-color, 70%);
+ }
+ &.event {
+ background-color: fade(@event-color, 70%);
}
}
}
@@ -540,6 +577,11 @@ a#edit-review { margin-top: 20px; }
}
}
+nav {
+ .glyphicon {
+ padding-right: 5px;
+ }
+}
// Rating star icons
.glyphicon-star, .glyphicon-star-empty {
diff --git a/critiquebrainz/frontend/static_manager.py b/critiquebrainz/frontend/static_manager.py
index 08678add1..04ceb67b4 100644
--- a/critiquebrainz/frontend/static_manager.py
+++ b/critiquebrainz/frontend/static_manager.py
@@ -1,7 +1,7 @@
-import os.path
import json
+import os.path
-MANIFEST_PATH = os.path.join(os.path.dirname(__file__), "static", "build", "rev-manifest.json")
+MANIFEST_PATH = os.path.join(os.path.dirname(__file__), "static", "build", "manifest.json")
manifest_content = {}
diff --git a/critiquebrainz/frontend/templates/artist/entity.html b/critiquebrainz/frontend/templates/artist/entity.html
index 636dcae1a..5586a3e34 100644
--- a/critiquebrainz/frontend/templates/artist/entity.html
+++ b/critiquebrainz/frontend/templates/artist/entity.html
@@ -39,15 +39,66 @@
{% endmacro %}
{% block content %}
-
- {{ artist.name }}
- {% if artist.disambiguation %}
- ({{ artist.disambiguation }})
+
+
{{ _('Reviews') }}
+ {% if not reviews %}
+
{{ _('No reviews found') }}
+ {% else %}
+
+
+
+
+ {% endif %}
+
{{ _('Discography') }}
- {% if count==0 %}
+ {% if release_group_count==0 %}
{{ _('No releases found') }}
{% else %}
@@ -95,13 +146,13 @@
{{ _('Discography') }}
{% endfor %}
- {% if count > limit %}
+ {% if release_group_count > release_groups_limit %}
@@ -109,6 +160,7 @@
{{ _('Discography') }}
{% endif %}
{% endif %}
+
diff --git a/critiquebrainz/frontend/templates/entity_review.html b/critiquebrainz/frontend/templates/entity_review.html
index df9168e81..c41108387 100644
--- a/critiquebrainz/frontend/templates/entity_review.html
+++ b/critiquebrainz/frontend/templates/entity_review.html
@@ -3,9 +3,15 @@
{{ _('%(album)s by %(artist)s',
album = '
'|safe + entity.title | default(_('[Unknown release group]')) + ''|safe,
artist = entity['artist-credit-phrase'] | default(_('[Unknown artist]'))) }}
+ {% elif review.entity_type == 'artist' %}
+ {{ _('%(artist)s', artist = '
'|safe + entity.name | default(_('[Unknown artist]')) + ''|safe) }}
+ {% elif review.entity_type == 'label' %}
+ {{ _('%(label)s', label = '
'|safe + entity.name | default(_('[Unknown label]')) + ''|safe) }}
{% elif review.entity_type == 'event' %}
{{ _('%(event)s', event = '
'|safe + entity.name | default(_('[Unknown event]')) + ''|safe) }}
{% elif review.entity_type == 'place' %}
{{ _('%(place)s', place = '
'|safe + entity.name | default(_('[Unknown place]')) + ''|safe) }}
+ {% elif review.entity_type == 'work' %}
+ {{ _('%(work)s', work = '
'|safe + entity.name | default(_('[Unknown work]')) + ''|safe) }}
{% endif %}
diff --git a/critiquebrainz/frontend/templates/event/entity.html b/critiquebrainz/frontend/templates/event/entity.html
index 34e5a43c5..862703d7c 100644
--- a/critiquebrainz/frontend/templates/event/entity.html
+++ b/critiquebrainz/frontend/templates/event/entity.html
@@ -15,7 +15,7 @@
{% if not my_review %}
-
{{ _('Write a review') }}
diff --git a/critiquebrainz/frontend/templates/footer.html b/critiquebrainz/frontend/templates/footer.html
index 689238beb..1f491712a 100644
--- a/critiquebrainz/frontend/templates/footer.html
+++ b/critiquebrainz/frontend/templates/footer.html
@@ -9,7 +9,6 @@
{% endif %}
-
-
- {{ comment_credit(comment, user_picture_size=24) }} - {% if current_user.is_authenticated and current_user == comment.user %} - - - - {% endif %} - - {{comment.created | date}} - -
-{{ comment.text_html|safe }}
-