From b5202636e14ed394b0daae4e9107ea45c998f098 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jes=C3=BAs=20Espino?= Date: Wed, 15 Jun 2016 18:06:12 +0200 Subject: [PATCH 1/5] Improve performance on exports --- taiga/export_import/serializers.py | 50 ++++++++++++++++++++------ taiga/export_import/services/render.py | 9 +++-- 2 files changed, 46 insertions(+), 13 deletions(-) diff --git a/taiga/export_import/serializers.py b/taiga/export_import/serializers.py index 43acb5af..16877ea7 100644 --- a/taiga/export_import/serializers.py +++ b/taiga/export_import/serializers.py @@ -50,6 +50,28 @@ from taiga.projects.notifications import services as notifications_services from taiga.projects.votes import services as votes_service from taiga.projects.history import services as history_service +_cache_user_by_pk = {} +_cache_user_by_email = {} +_custom_tasks_attributes_cache = {} +_custom_issues_attributes_cache = {} +_custom_userstories_attributes_cache = {} + +def cached_get_user_by_pk(pk): + if pk not in _cache_user_by_pk: + try: + _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk) + except Exception: + _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk) + return _cache_user_by_pk[pk] + +def cached_get_user_by_email(email): + if email not in _cache_user_by_email: + try: + _cache_user_by_email[email] = users_models.User.objects.get(email=email) + except Exception: + _cache_user_by_email[email] = users_models.User.objects.get(email=email) + return _cache_user_by_email[email] + class FileField(serializers.WritableField): read_only = False @@ -128,7 +150,7 @@ class UserRelatedField(RelatedNoneSafeField): def from_native(self, data): try: - return users_models.User.objects.get(email=data) + return cached_get_user_by_email(data) except users_models.User.DoesNotExist: return None @@ -138,14 +160,14 @@ class UserPkField(serializers.RelatedField): def to_native(self, obj): try: - user = users_models.User.objects.get(pk=obj) + user = cached_get_user_by_pk(obj) return user.email except users_models.User.DoesNotExist: return None def from_native(self, data): try: - user = users_models.User.objects.get(email=data) + user = cached_get_user_by_email(data) return user.pk except users_models.User.DoesNotExist: return None @@ -185,7 +207,7 @@ class HistoryUserField(JsonField): if obj is None or obj == {}: return [] try: - user = users_models.User.objects.get(pk=obj['pk']) + user = cached_get_user_by_pk(obj['pk']) except users_models.User.DoesNotExist: user = None return (UserRelatedField().to_native(user), obj['name']) @@ -420,7 +442,7 @@ class CustomAttributesValuesExportSerializerMixin(serializers.ModelSerializer): try: values = obj.custom_attributes_values.attributes_values - custom_attributes = self.custom_attributes_queryset(obj.project).values('id', 'name') + custom_attributes = self.custom_attributes_queryset(obj.project) return _use_name_instead_id_as_key_in_custom_attributes_values(custom_attributes, values) except ObjectDoesNotExist: @@ -550,7 +572,9 @@ class TaskExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryE exclude = ('id', 'project') def custom_attributes_queryset(self, project): - return project.taskcustomattributes.all() + if project.id not in _custom_tasks_attributes_cache: + _custom_tasks_attributes_cache[project.id] = list(project.taskcustomattributes.all().values('id', 'name')) + return _custom_tasks_attributes_cache[project.id] class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin, @@ -568,7 +592,9 @@ class 
UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin, His exclude = ('id', 'project', 'points', 'tasks') def custom_attributes_queryset(self, project): - return project.userstorycustomattributes.all() + if project.id not in _custom_userstories_attributes_cache: + _custom_userstories_attributes_cache[project.id] = list(project.userstorycustomattributes.all().values('id', 'name')) + return _custom_userstories_attributes_cache[project.id] class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin, @@ -591,7 +617,9 @@ class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin, History return [x.email for x in votes_service.get_voters(obj)] def custom_attributes_queryset(self, project): - return project.issuecustomattributes.all() + if project.id not in _custom_issues_attributes_cache: + _custom_issues_attributes_cache[project.id] = list(project.issuecustomattributes.all().values('id', 'name')) + return _custom_issues_attributes_cache[project.id] class WikiPageExportSerializer(HistoryExportSerializerMixin, AttachmentExportSerializerMixin, @@ -618,17 +646,17 @@ class TimelineDataField(serializers.WritableField): def to_native(self, data): new_data = copy.deepcopy(data) try: - user = users_models.User.objects.get(pk=new_data["user"]["id"]) + user = cached_get_user_by_pk(new_data["user"]["id"]) new_data["user"]["email"] = user.email del new_data["user"]["id"] - except users_models.User.DoesNotExist: + except Exception: pass return new_data def from_native(self, data): new_data = copy.deepcopy(data) try: - user = users_models.User.objects.get(email=new_data["user"]["email"]) + user = cached_get_user_by_email(new_data["user"]["email"]) new_data["user"]["id"] = user.id del new_data["user"]["email"] except users_models.User.DoesNotExist: diff --git a/taiga/export_import/services/render.py b/taiga/export_import/services/render.py index cc4f8edf..19015878 100644 --- a/taiga/export_import/services/render.py +++ b/taiga/export_import/services/render.py @@ -50,6 +50,12 @@ def render_project(project, outfile, chunk_size = 8190): # These four "special" fields hava attachments so we use them in a special way if field_name in ["wiki_pages", "user_stories", "tasks", "issues"]: value = get_component(project, field_name) + if field_name != "wiki_pages": + value = value.select_related('owner', 'status', 'milestone', 'project', 'assigned_to', 'custom_attributes_values') + if field_name == "issues": + value = value.select_related('severity', 'priority', 'type') + value = value.prefetch_related('history_entry', 'attachments') + outfile.write('"{}": [\n'.format(field_name)) attachments_field = field.fields.pop("attachments", None) @@ -101,9 +107,8 @@ def render_project(project, outfile, chunk_size = 8190): outfile.write(']}') outfile.flush() - gc.collect() + gc.collect() outfile.write(']') - else: value = field.field_to_native(project, field_name) outfile.write('"{}": {}'.format(field_name, json.dumps(value))) From 1336f5c8e92ddd5f5a0d756293da0ccad23f685c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jes=C3=BAs=20Espino?= Date: Wed, 15 Jun 2016 20:06:46 +0200 Subject: [PATCH 2/5] Add gzip support to exports --- taiga/export_import/api.py | 21 +++++++--- .../management/commands/dump_project.py | 19 +++++++-- taiga/export_import/services/render.py | 32 +++++++------- taiga/export_import/tasks.py | 27 ++++++++---- tests/integration/test_exporter_api.py | 42 ++++++++++++++++++- tests/unit/test_export.py | 4 +- 6 files changed, 109 insertions(+), 36 
deletions(-) diff --git a/taiga/export_import/api.py b/taiga/export_import/api.py index bf5cf1a9..4e6f87e0 100644 --- a/taiga/export_import/api.py +++ b/taiga/export_import/api.py @@ -18,6 +18,7 @@ import codecs import uuid +import gzip from django.utils.decorators import method_decorator from django.utils.translation import ugettext as _ @@ -64,16 +65,24 @@ class ProjectExporterViewSet(mixins.ImportThrottlingPolicyMixin, GenericViewSet) project = get_object_or_404(self.get_queryset(), pk=pk) self.check_permissions(request, 'export_project', project) + dump_format = request.QUERY_PARAMS.get("dump_format", None) + if settings.CELERY_ENABLED: - task = tasks.dump_project.delay(request.user, project) - tasks.delete_project_dump.apply_async((project.pk, project.slug, task.id), + task = tasks.dump_project.delay(request.user, project, dump_format) + tasks.delete_project_dump.apply_async((project.pk, project.slug, task.id, dump_format), countdown=settings.EXPORTS_TTL) return response.Accepted({"export_id": task.id}) - path = "exports/{}/{}-{}.json".format(project.pk, project.slug, uuid.uuid4().hex) - storage_path = default_storage.path(path) - with default_storage.open(storage_path, mode="w") as outfile: - services.render_project(project, outfile) + if dump_format == "gzip": + path = "exports/{}/{}-{}.json.gz".format(project.pk, project.slug, uuid.uuid4().hex) + storage_path = default_storage.path(path) + with default_storage.open(storage_path, mode="wb") as outfile: + services.render_project(project, gzip.GzipFile(fileobj=outfile)) + else: + path = "exports/{}/{}-{}.json".format(project.pk, project.slug, uuid.uuid4().hex) + storage_path = default_storage.path(path) + with default_storage.open(storage_path, mode="wb") as outfile: + services.render_project(project, outfile) response_data = { "url": default_storage.url(path) diff --git a/taiga/export_import/management/commands/dump_project.py b/taiga/export_import/management/commands/dump_project.py index 3b3ceaf6..3c6996f3 100644 --- a/taiga/export_import/management/commands/dump_project.py +++ b/taiga/export_import/management/commands/dump_project.py @@ -22,6 +22,7 @@ from taiga.projects.models import Project from taiga.export_import.services import render_project import os +import gzip class Command(BaseCommand): @@ -39,6 +40,13 @@ class Command(BaseCommand): metavar="DIR", help="Directory to save the json files. ('./' by default)") + parser.add_argument("-f", "--format", + action="store", + dest="format", + default="plain", + metavar="[plain|gzip]", + help="Format to the output file plain json or gzipped json. 
('plain' by default)") + def handle(self, *args, **options): dst_dir = options["dst_dir"] @@ -56,8 +64,13 @@ class Command(BaseCommand): except Project.DoesNotExist: raise CommandError("Project '{}' does not exist".format(project_slug)) - dst_file = os.path.join(dst_dir, "{}.json".format(project_slug)) - with open(dst_file, "w") as f: - render_project(project, f) + if options["format"] == "gzip": + dst_file = os.path.join(dst_dir, "{}.json.gz".format(project_slug)) + with gzip.GzipFile(dst_file, "wb") as f: + render_project(project, f) + else: + dst_file = os.path.join(dst_dir, "{}.json".format(project_slug)) + with open(dst_file, "wb") as f: + render_project(project, f) print("-> Generate dump of project '{}' in '{}'".format(project.name, dst_file)) diff --git a/taiga/export_import/services/render.py b/taiga/export_import/services/render.py index 19015878..923647a7 100644 --- a/taiga/export_import/services/render.py +++ b/taiga/export_import/services/render.py @@ -34,13 +34,13 @@ from .. import serializers def render_project(project, outfile, chunk_size = 8190): serializer = serializers.ProjectExportSerializer(project) - outfile.write('{\n') + outfile.write(b'{\n') first_field = True for field_name in serializer.fields.keys(): # Avoid writing "," in the last element if not first_field: - outfile.write(",\n") + outfile.write(b",\n") else: first_field = False @@ -56,7 +56,7 @@ def render_project(project, outfile, chunk_size = 8190): value = value.select_related('severity', 'priority', 'type') value = value.prefetch_related('history_entry', 'attachments') - outfile.write('"{}": [\n'.format(field_name)) + outfile.write('"{}": [\n'.format(field_name).encode()) attachments_field = field.fields.pop("attachments", None) if attachments_field: @@ -66,20 +66,20 @@ def render_project(project, outfile, chunk_size = 8190): for item in value.iterator(): # Avoid writing "," in the last element if not first_item: - outfile.write(",\n") + outfile.write(b",\n") else: first_item = False dumped_value = json.dumps(field.to_native(item)) writing_value = dumped_value[:-1]+ ',\n "attachments": [\n' - outfile.write(writing_value) + outfile.write(writing_value.encode()) first_attachment = True for attachment in item.attachments.iterator(): # Avoid writing "," in the last element if not first_attachment: - outfile.write(",\n") + outfile.write(b",\n") else: first_attachment = False @@ -88,7 +88,7 @@ def render_project(project, outfile, chunk_size = 8190): attached_file_serializer = attachment_serializer.fields.pop("attached_file") dumped_value = json.dumps(attachment_serializer.data) dumped_value = dumped_value[:-1] + ',\n "attached_file":{\n "data":"' - outfile.write(dumped_value) + outfile.write(dumped_value.encode()) # We write the attached_files by chunks so the memory used is not increased attachment_file = attachment.attached_file @@ -99,32 +99,32 @@ def render_project(project, outfile, chunk_size = 8190): if not bin_data: break - b64_data = base64.b64encode(bin_data).decode('utf-8') + b64_data = base64.b64encode(bin_data) outfile.write(b64_data) outfile.write('", \n "name":"{}"}}\n}}'.format( - os.path.basename(attachment_file.name))) + os.path.basename(attachment_file.name)).encode()) - outfile.write(']}') + outfile.write(b']}') outfile.flush() gc.collect() - outfile.write(']') + outfile.write(b']') else: value = field.field_to_native(project, field_name) - outfile.write('"{}": {}'.format(field_name, json.dumps(value))) + outfile.write('"{}": {}'.format(field_name, json.dumps(value)).encode()) # Generate the 
timeline - outfile.write(',\n"timeline": [\n') + outfile.write(b',\n"timeline": [\n') first_timeline = True for timeline_item in get_project_timeline(project).iterator(): # Avoid writing "," in the last element if not first_timeline: - outfile.write(",\n") + outfile.write(b",\n") else: first_timeline = False dumped_value = json.dumps(serializers.TimelineExportSerializer(timeline_item).data) - outfile.write(dumped_value) + outfile.write(dumped_value.encode()) - outfile.write(']}\n') + outfile.write(b']}\n') diff --git a/taiga/export_import/tasks.py b/taiga/export_import/tasks.py index 8ba61645..aa75c257 100644 --- a/taiga/export_import/tasks.py +++ b/taiga/export_import/tasks.py @@ -19,6 +19,7 @@ import datetime import logging import sys +import gzip from django.core.files.storage import default_storage from django.core.files.base import ContentFile @@ -41,14 +42,20 @@ import resource @app.task(bind=True) -def dump_project(self, user, project): - path = "exports/{}/{}-{}.json".format(project.pk, project.slug, self.request.id) - storage_path = default_storage.path(path) - +def dump_project(self, user, project, dump_format): try: + if dump_format == "gzip": + path = "exports/{}/{}-{}.json.gz".format(project.pk, project.slug, self.request.id) + storage_path = default_storage.path(path) + with default_storage.open(storage_path, mode="wb") as outfile: + services.render_project(project, gzip.GzipFile(fileobj=outfile)) + else: + path = "exports/{}/{}-{}.json".format(project.pk, project.slug, self.request.id) + storage_path = default_storage.path(path) + with default_storage.open(storage_path, mode="wb") as outfile: + services.render_project(project, outfile) + url = default_storage.url(path) - with default_storage.open(storage_path, mode="w") as outfile: - services.render_project(project, outfile) except Exception: # Error @@ -75,8 +82,12 @@ def dump_project(self, user, project): @app.task -def delete_project_dump(project_id, project_slug, task_id): - default_storage.delete("exports/{}/{}-{}.json".format(project_id, project_slug, task_id)) +def delete_project_dump(project_id, project_slug, task_id, dump_format): + if dump_format == "gzip": + path = "exports/{}/{}-{}.json.gz".format(project_id, project_slug, task_id) + else: + path = "exports/{}/{}-{}.json".format(project_id, project_slug, task_id) + default_storage.delete(path) ADMIN_ERROR_LOAD_PROJECT_DUMP_MESSAGE = _(""" diff --git a/tests/integration/test_exporter_api.py b/tests/integration/test_exporter_api.py index c8727ae8..5ea4dd0e 100644 --- a/tests/integration/test_exporter_api.py +++ b/tests/integration/test_exporter_api.py @@ -53,6 +53,24 @@ def test_valid_project_export_with_celery_disabled(client, settings): assert response.status_code == 200 response_data = response.data assert "url" in response_data + assert response_data["url"].endswith(".json") + + +def test_valid_project_export_with_celery_disabled_and_gzip(client, settings): + settings.CELERY_ENABLED = False + + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + client.login(user) + + url = reverse("exporter-detail", args=[project.pk]) + + response = client.get(url+"?dump_format=gzip", content_type="application/json") + assert response.status_code == 200 + response_data = response.data + assert "url" in response_data + assert response_data["url"].endswith(".gz") def test_valid_project_export_with_celery_enabled(client, settings): @@ -72,7 +90,29 @@ def 
test_valid_project_export_with_celery_enabled(client, settings): response_data = response.data assert "export_id" in response_data - args = (project.id, project.slug, response_data["export_id"],) + args = (project.id, project.slug, response_data["export_id"], None) + kwargs = {"countdown": settings.EXPORTS_TTL} + delete_project_dump_mock.apply_async.assert_called_once_with(args, **kwargs) + + +def test_valid_project_export_with_celery_enabled_and_gzip(client, settings): + settings.CELERY_ENABLED = True + + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + client.login(user) + + url = reverse("exporter-detail", args=[project.pk]) + + #delete_project_dump task should have been launched + with mock.patch('taiga.export_import.tasks.delete_project_dump') as delete_project_dump_mock: + response = client.get(url+"?dump_format=gzip", content_type="application/json") + assert response.status_code == 202 + response_data = response.data + assert "export_id" in response_data + + args = (project.id, project.slug, response_data["export_id"], "gzip") kwargs = {"countdown": settings.EXPORTS_TTL} delete_project_dump_mock.apply_async.assert_called_once_with(args, **kwargs) diff --git a/tests/unit/test_export.py b/tests/unit/test_export.py index 546814a8..a8ce775f 100644 --- a/tests/unit/test_export.py +++ b/tests/unit/test_export.py @@ -28,7 +28,7 @@ pytestmark = pytest.mark.django_db def test_export_issue_finish_date(client): issue = f.IssueFactory.create(finished_date="2014-10-22") - output = io.StringIO() + output = io.BytesIO() render_project(issue.project, output) project_data = json.loads(output.getvalue()) finish_date = project_data["issues"][0]["finished_date"] @@ -37,7 +37,7 @@ def test_export_issue_finish_date(client): def test_export_user_story_finish_date(client): user_story = f.UserStoryFactory.create(finish_date="2014-10-22") - output = io.StringIO() + output = io.BytesIO() render_project(user_story.project, output) project_data = json.loads(output.getvalue()) finish_date = project_data["user_stories"][0]["finish_date"] From 1fa5a12d061346240e30fb59a4389797476cab3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jes=C3=BAs=20Espino?= Date: Fri, 17 Jun 2016 09:33:12 +0200 Subject: [PATCH 3/5] Reestructure serializers module --- taiga/export_import/serializers/__init__.py | 45 ++ taiga/export_import/serializers/cache.py | 42 ++ taiga/export_import/serializers/fields.py | 250 ++++++++++++ taiga/export_import/serializers/mixins.py | 141 +++++++ .../{ => serializers}/serializers.py | 384 +----------------- 5 files changed, 495 insertions(+), 367 deletions(-) create mode 100644 taiga/export_import/serializers/__init__.py create mode 100644 taiga/export_import/serializers/cache.py create mode 100644 taiga/export_import/serializers/fields.py create mode 100644 taiga/export_import/serializers/mixins.py rename taiga/export_import/{ => serializers}/serializers.py (52%) diff --git a/taiga/export_import/serializers/__init__.py b/taiga/export_import/serializers/__init__.py new file mode 100644 index 00000000..5d793a87 --- /dev/null +++ b/taiga/export_import/serializers/__init__.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as 
+# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from .serializers import PointsExportSerializer +from .serializers import UserStoryStatusExportSerializer +from .serializers import TaskStatusExportSerializer +from .serializers import IssueStatusExportSerializer +from .serializers import PriorityExportSerializer +from .serializers import SeverityExportSerializer +from .serializers import IssueTypeExportSerializer +from .serializers import RoleExportSerializer +from .serializers import UserStoryCustomAttributeExportSerializer +from .serializers import TaskCustomAttributeExportSerializer +from .serializers import IssueCustomAttributeExportSerializer +from .serializers import BaseCustomAttributesValuesExportSerializer +from .serializers import UserStoryCustomAttributesValuesExportSerializer +from .serializers import TaskCustomAttributesValuesExportSerializer +from .serializers import IssueCustomAttributesValuesExportSerializer +from .serializers import MembershipExportSerializer +from .serializers import RolePointsExportSerializer +from .serializers import MilestoneExportSerializer +from .serializers import TaskExportSerializer +from .serializers import UserStoryExportSerializer +from .serializers import IssueExportSerializer +from .serializers import WikiPageExportSerializer +from .serializers import WikiLinkExportSerializer +from .serializers import TimelineExportSerializer +from .serializers import ProjectExportSerializer +from .mixins import AttachmentExportSerializer +from .mixins import HistoryExportSerializer diff --git a/taiga/export_import/serializers/cache.py b/taiga/export_import/serializers/cache.py new file mode 100644 index 00000000..c4eb5bfa --- /dev/null +++ b/taiga/export_import/serializers/cache.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from taiga.users import models as users_models + +_cache_user_by_pk = {} +_cache_user_by_email = {} +_custom_tasks_attributes_cache = {} +_custom_issues_attributes_cache = {} +_custom_userstories_attributes_cache = {} + + +def cached_get_user_by_pk(pk): + if pk not in _cache_user_by_pk: + try: + _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk) + except Exception: + _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk) + return _cache_user_by_pk[pk] + +def cached_get_user_by_email(email): + if email not in _cache_user_by_email: + try: + _cache_user_by_email[email] = users_models.User.objects.get(email=email) + except Exception: + _cache_user_by_email[email] = users_models.User.objects.get(email=email) + return _cache_user_by_email[email] diff --git a/taiga/export_import/serializers/fields.py b/taiga/export_import/serializers/fields.py new file mode 100644 index 00000000..f2ca8841 --- /dev/null +++ b/taiga/export_import/serializers/fields.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +import base64 +import os +import copy +from collections import OrderedDict + +from django.core.files.base import ContentFile +from django.core.exceptions import ObjectDoesNotExist +from django.core.exceptions import ValidationError +from django.utils.translation import ugettext as _ + +from taiga.base.api import serializers +from taiga.base.fields import JsonField +from taiga.mdrender.service import render as mdrender +from taiga.users import models as users_models + +from .cache import cached_get_user_by_email, cached_get_user_by_pk + + +class FileField(serializers.WritableField): + read_only = False + + def to_native(self, obj): + if not obj: + return None + + data = base64.b64encode(obj.read()).decode('utf-8') + + return OrderedDict([ + ("data", data), + ("name", os.path.basename(obj.name)), + ]) + + def from_native(self, data): + if not data: + return None + + decoded_data = b'' + # The original file was encoded by chunks but we don't really know its + # length or if it was multiple of 3 so we must iterate over all those chunks + # decoding them one by one + for decoding_chunk in data['data'].split("="): + # When encoding to base64 3 bytes are transformed into 4 bytes and + # the extra space of the block is filled with = + # We must ensure that the decoding chunk has a length multiple of 4 so + # we restore the stripped '='s adding appending them until the chunk has + # a length multiple of 4 + decoding_chunk += "=" * (-len(decoding_chunk) % 4) + decoded_data += base64.b64decode(decoding_chunk+"=") + + return ContentFile(decoded_data, name=data['name']) + + +class RelatedNoneSafeField(serializers.RelatedField): + def field_from_native(self, data, files, field_name, into): + if self.read_only: + return + + try: + if self.many: + try: + # Form data + value = data.getlist(field_name) + if value == [''] or value == []: + raise KeyError + except AttributeError: + # Non-form data + value = data[field_name] + else: + value = data[field_name] + except KeyError: + if self.partial: + return + value = self.get_default_value() + + key = self.source or field_name + if value in self.null_values: + if self.required: + raise ValidationError(self.error_messages['required']) + into[key] = None + elif self.many: + into[key] = [self.from_native(item) for item in value if self.from_native(item) is not None] + else: + into[key] = self.from_native(value) + + +class UserRelatedField(RelatedNoneSafeField): + read_only = False + + def to_native(self, obj): + if obj: + return obj.email + return None + + def from_native(self, data): + try: + return cached_get_user_by_email(data) + except users_models.User.DoesNotExist: + return None + + +class UserPkField(serializers.RelatedField): + read_only = False + + def to_native(self, obj): + try: + user = cached_get_user_by_pk(obj) + return user.email + except users_models.User.DoesNotExist: + return None + + def from_native(self, data): + try: + user = cached_get_user_by_email(data) + return user.pk + except users_models.User.DoesNotExist: + return None + + +class CommentField(serializers.WritableField): + read_only = False + + def field_from_native(self, data, files, field_name, into): + super().field_from_native(data, files, field_name, into) + into["comment_html"] = mdrender(self.context['project'], data.get("comment", "")) + + +class ProjectRelatedField(serializers.RelatedField): + read_only = False + null_values = (None, "") + + def __init__(self, slug_field, *args, **kwargs): + self.slug_field = slug_field + super().__init__(*args, **kwargs) + + def 
to_native(self, obj): + if obj: + return getattr(obj, self.slug_field) + return None + + def from_native(self, data): + try: + kwargs = {self.slug_field: data, "project": self.context['project']} + return self.queryset.get(**kwargs) + except ObjectDoesNotExist: + raise ValidationError(_("{}=\"{}\" not found in this project".format(self.slug_field, data))) + + +class HistoryUserField(JsonField): + def to_native(self, obj): + if obj is None or obj == {}: + return [] + try: + user = cached_get_user_by_pk(obj['pk']) + except users_models.User.DoesNotExist: + user = None + return (UserRelatedField().to_native(user), obj['name']) + + def from_native(self, data): + if data is None: + return {} + + if len(data) < 2: + return {} + + user = UserRelatedField().from_native(data[0]) + + if user: + pk = user.pk + else: + pk = None + + return {"pk": pk, "name": data[1]} + + +class HistoryValuesField(JsonField): + def to_native(self, obj): + if obj is None: + return [] + if "users" in obj: + obj['users'] = list(map(UserPkField().to_native, obj['users'])) + return obj + + def from_native(self, data): + if data is None: + return [] + if "users" in data: + data['users'] = list(map(UserPkField().from_native, data['users'])) + return data + + +class HistoryDiffField(JsonField): + def to_native(self, obj): + if obj is None: + return [] + + if "assigned_to" in obj: + obj['assigned_to'] = list(map(UserPkField().to_native, obj['assigned_to'])) + + return obj + + def from_native(self, data): + if data is None: + return [] + + if "assigned_to" in data: + data['assigned_to'] = list(map(UserPkField().from_native, data['assigned_to'])) + return data + + +class TimelineDataField(serializers.WritableField): + read_only = False + + def to_native(self, data): + new_data = copy.deepcopy(data) + try: + user = cached_get_user_by_pk(new_data["user"]["id"]) + new_data["user"]["email"] = user.email + del new_data["user"]["id"] + except Exception: + pass + return new_data + + def from_native(self, data): + new_data = copy.deepcopy(data) + try: + user = cached_get_user_by_email(new_data["user"]["email"]) + new_data["user"]["id"] = user.id + del new_data["user"]["email"] + except users_models.User.DoesNotExist: + pass + + return new_data diff --git a/taiga/export_import/serializers/mixins.py b/taiga/export_import/serializers/mixins.py new file mode 100644 index 00000000..007649a2 --- /dev/null +++ b/taiga/export_import/serializers/mixins.py @@ -0,0 +1,141 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ +from django.contrib.auth import get_user_model +from django.core.exceptions import ObjectDoesNotExist +from django.contrib.contenttypes.models import ContentType + +from taiga.base.api import serializers +from taiga.projects.history import models as history_models +from taiga.projects.attachments import models as attachments_models +from taiga.projects.notifications import services as notifications_services +from taiga.projects.history import services as history_service + +from .fields import (UserRelatedField, HistoryUserField, HistoryDiffField, + JsonField, HistoryValuesField, CommentField, FileField) + + +class HistoryExportSerializer(serializers.ModelSerializer): + user = HistoryUserField() + diff = HistoryDiffField(required=False) + snapshot = JsonField(required=False) + values = HistoryValuesField(required=False) + comment = CommentField(required=False) + delete_comment_date = serializers.DateTimeField(required=False) + delete_comment_user = HistoryUserField(required=False) + + class Meta: + model = history_models.HistoryEntry + exclude = ("id", "comment_html", "key") + + +class HistoryExportSerializerMixin(serializers.ModelSerializer): + history = serializers.SerializerMethodField("get_history") + + def get_history(self, obj): + history_qs = history_service.get_history_queryset_by_model_instance(obj, + types=(history_models.HistoryType.change, history_models.HistoryType.create,)) + + return HistoryExportSerializer(history_qs, many=True).data + + +class AttachmentExportSerializer(serializers.ModelSerializer): + owner = UserRelatedField(required=False) + attached_file = FileField() + modified_date = serializers.DateTimeField(required=False) + + class Meta: + model = attachments_models.Attachment + exclude = ('id', 'content_type', 'object_id', 'project') + + +class AttachmentExportSerializerMixin(serializers.ModelSerializer): + attachments = serializers.SerializerMethodField("get_attachments") + + def get_attachments(self, obj): + content_type = ContentType.objects.get_for_model(obj.__class__) + attachments_qs = attachments_models.Attachment.objects.filter(object_id=obj.pk, + content_type=content_type) + return AttachmentExportSerializer(attachments_qs, many=True).data + + +class CustomAttributesValuesExportSerializerMixin(serializers.ModelSerializer): + custom_attributes_values = serializers.SerializerMethodField("get_custom_attributes_values") + + def custom_attributes_queryset(self, project): + raise NotImplementedError() + + def get_custom_attributes_values(self, obj): + def _use_name_instead_id_as_key_in_custom_attributes_values(custom_attributes, values): + ret = {} + for attr in custom_attributes: + value = values.get(str(attr["id"]), None) + if value is not None: + ret[attr["name"]] = value + + return ret + + try: + values = obj.custom_attributes_values.attributes_values + custom_attributes = self.custom_attributes_queryset(obj.project) + + return _use_name_instead_id_as_key_in_custom_attributes_values(custom_attributes, values) + except ObjectDoesNotExist: + return None + + +class WatcheableObjectModelSerializerMixin(serializers.ModelSerializer): + watchers = UserRelatedField(many=True, required=False) + + def __init__(self, *args, **kwargs): + self._watchers_field = self.base_fields.pop("watchers", None) + super(WatcheableObjectModelSerializerMixin, self).__init__(*args, **kwargs) + + """ + watchers is not a field from the model so we need to do some magic to make it work like a normal field + It's supposed to be represented as an email list but internally it's treated 
like notifications.Watched instances + """ + + def restore_object(self, attrs, instance=None): + watcher_field = self.fields.pop("watchers", None) + instance = super(WatcheableObjectModelSerializerMixin, self).restore_object(attrs, instance) + self._watchers = self.init_data.get("watchers", []) + return instance + + def save_watchers(self): + new_watcher_emails = set(self._watchers) + old_watcher_emails = set(self.object.get_watchers().values_list("email", flat=True)) + adding_watcher_emails = list(new_watcher_emails.difference(old_watcher_emails)) + removing_watcher_emails = list(old_watcher_emails.difference(new_watcher_emails)) + + User = get_user_model() + adding_users = User.objects.filter(email__in=adding_watcher_emails) + removing_users = User.objects.filter(email__in=removing_watcher_emails) + + for user in adding_users: + notifications_services.add_watcher(self.object, user) + + for user in removing_users: + notifications_services.remove_watcher(self.object, user) + + self.object.watchers = [user.email for user in self.object.get_watchers()] + + def to_native(self, obj): + ret = super(WatcheableObjectModelSerializerMixin, self).to_native(obj) + ret["watchers"] = [user.email for user in obj.get_watchers()] + return ret diff --git a/taiga/export_import/serializers.py b/taiga/export_import/serializers/serializers.py similarity index 52% rename from taiga/export_import/serializers.py rename to taiga/export_import/serializers/serializers.py index 16877ea7..e7a2af76 100644 --- a/taiga/export_import/serializers.py +++ b/taiga/export_import/serializers/serializers.py @@ -16,25 +16,14 @@ # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see . -import base64 import copy -import os -from collections import OrderedDict -from django.apps import apps -from django.contrib.auth import get_user_model -from django.core.files.base import ContentFile -from django.core.exceptions import ObjectDoesNotExist from django.core.exceptions import ValidationError -from django.core.exceptions import ObjectDoesNotExist from django.utils.translation import ugettext as _ -from django.contrib.contenttypes.models import ContentType - from taiga.base.api import serializers from taiga.base.fields import JsonField, PgArrayField -from taiga.mdrender.service import render as mdrender from taiga.projects import models as projects_models from taiga.projects.custom_attributes import models as custom_attributes_models from taiga.projects.userstories import models as userstories_models @@ -46,308 +35,19 @@ from taiga.projects.history import models as history_models from taiga.projects.attachments import models as attachments_models from taiga.timeline import models as timeline_models from taiga.users import models as users_models -from taiga.projects.notifications import services as notifications_services from taiga.projects.votes import services as votes_service -from taiga.projects.history import services as history_service -_cache_user_by_pk = {} -_cache_user_by_email = {} -_custom_tasks_attributes_cache = {} -_custom_issues_attributes_cache = {} -_custom_userstories_attributes_cache = {} - -def cached_get_user_by_pk(pk): - if pk not in _cache_user_by_pk: - try: - _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk) - except Exception: - _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk) - return _cache_user_by_pk[pk] - -def cached_get_user_by_email(email): - if email not in _cache_user_by_email: - try: - _cache_user_by_email[email] = 
users_models.User.objects.get(email=email) - except Exception: - _cache_user_by_email[email] = users_models.User.objects.get(email=email) - return _cache_user_by_email[email] - - -class FileField(serializers.WritableField): - read_only = False - - def to_native(self, obj): - if not obj: - return None - - data = base64.b64encode(obj.read()).decode('utf-8') - - return OrderedDict([ - ("data", data), - ("name", os.path.basename(obj.name)), - ]) - - def from_native(self, data): - if not data: - return None - - decoded_data = b'' - # The original file was encoded by chunks but we don't really know its - # length or if it was multiple of 3 so we must iterate over all those chunks - # decoding them one by one - for decoding_chunk in data['data'].split("="): - # When encoding to base64 3 bytes are transformed into 4 bytes and - # the extra space of the block is filled with = - # We must ensure that the decoding chunk has a length multiple of 4 so - # we restore the stripped '='s adding appending them until the chunk has - # a length multiple of 4 - decoding_chunk += "=" * (-len(decoding_chunk) % 4) - decoded_data += base64.b64decode(decoding_chunk+"=") - - return ContentFile(decoded_data, name=data['name']) - - -class RelatedNoneSafeField(serializers.RelatedField): - def field_from_native(self, data, files, field_name, into): - if self.read_only: - return - - try: - if self.many: - try: - # Form data - value = data.getlist(field_name) - if value == [''] or value == []: - raise KeyError - except AttributeError: - # Non-form data - value = data[field_name] - else: - value = data[field_name] - except KeyError: - if self.partial: - return - value = self.get_default_value() - - key = self.source or field_name - if value in self.null_values: - if self.required: - raise ValidationError(self.error_messages['required']) - into[key] = None - elif self.many: - into[key] = [self.from_native(item) for item in value if self.from_native(item) is not None] - else: - into[key] = self.from_native(value) - - -class UserRelatedField(RelatedNoneSafeField): - read_only = False - - def to_native(self, obj): - if obj: - return obj.email - return None - - def from_native(self, data): - try: - return cached_get_user_by_email(data) - except users_models.User.DoesNotExist: - return None - - -class UserPkField(serializers.RelatedField): - read_only = False - - def to_native(self, obj): - try: - user = cached_get_user_by_pk(obj) - return user.email - except users_models.User.DoesNotExist: - return None - - def from_native(self, data): - try: - user = cached_get_user_by_email(data) - return user.pk - except users_models.User.DoesNotExist: - return None - - -class CommentField(serializers.WritableField): - read_only = False - - def field_from_native(self, data, files, field_name, into): - super().field_from_native(data, files, field_name, into) - into["comment_html"] = mdrender(self.context['project'], data.get("comment", "")) - - -class ProjectRelatedField(serializers.RelatedField): - read_only = False - null_values = (None, "") - - def __init__(self, slug_field, *args, **kwargs): - self.slug_field = slug_field - super().__init__(*args, **kwargs) - - def to_native(self, obj): - if obj: - return getattr(obj, self.slug_field) - return None - - def from_native(self, data): - try: - kwargs = {self.slug_field: data, "project": self.context['project']} - return self.queryset.get(**kwargs) - except ObjectDoesNotExist: - raise ValidationError(_("{}=\"{}\" not found in this project".format(self.slug_field, data))) - - -class 
HistoryUserField(JsonField): - def to_native(self, obj): - if obj is None or obj == {}: - return [] - try: - user = cached_get_user_by_pk(obj['pk']) - except users_models.User.DoesNotExist: - user = None - return (UserRelatedField().to_native(user), obj['name']) - - def from_native(self, data): - if data is None: - return {} - - if len(data) < 2: - return {} - - user = UserRelatedField().from_native(data[0]) - - if user: - pk = user.pk - else: - pk = None - - return {"pk": pk, "name": data[1]} - - -class HistoryValuesField(JsonField): - def to_native(self, obj): - if obj is None: - return [] - if "users" in obj: - obj['users'] = list(map(UserPkField().to_native, obj['users'])) - return obj - - def from_native(self, data): - if data is None: - return [] - if "users" in data: - data['users'] = list(map(UserPkField().from_native, data['users'])) - return data - - -class HistoryDiffField(JsonField): - def to_native(self, obj): - if obj is None: - return [] - - if "assigned_to" in obj: - obj['assigned_to'] = list(map(UserPkField().to_native, obj['assigned_to'])) - - return obj - - def from_native(self, data): - if data is None: - return [] - - if "assigned_to" in data: - data['assigned_to'] = list(map(UserPkField().from_native, data['assigned_to'])) - return data - - -class WatcheableObjectModelSerializer(serializers.ModelSerializer): - watchers = UserRelatedField(many=True, required=False) - - def __init__(self, *args, **kwargs): - self._watchers_field = self.base_fields.pop("watchers", None) - super(WatcheableObjectModelSerializer, self).__init__(*args, **kwargs) - - """ - watchers is not a field from the model so we need to do some magic to make it work like a normal field - It's supposed to be represented as an email list but internally it's treated like notifications.Watched instances - """ - - def restore_object(self, attrs, instance=None): - watcher_field = self.fields.pop("watchers", None) - instance = super(WatcheableObjectModelSerializer, self).restore_object(attrs, instance) - self._watchers = self.init_data.get("watchers", []) - return instance - - def save_watchers(self): - new_watcher_emails = set(self._watchers) - old_watcher_emails = set(self.object.get_watchers().values_list("email", flat=True)) - adding_watcher_emails = list(new_watcher_emails.difference(old_watcher_emails)) - removing_watcher_emails = list(old_watcher_emails.difference(new_watcher_emails)) - - User = get_user_model() - adding_users = User.objects.filter(email__in=adding_watcher_emails) - removing_users = User.objects.filter(email__in=removing_watcher_emails) - - for user in adding_users: - notifications_services.add_watcher(self.object, user) - - for user in removing_users: - notifications_services.remove_watcher(self.object, user) - - self.object.watchers = [user.email for user in self.object.get_watchers()] - - def to_native(self, obj): - ret = super(WatcheableObjectModelSerializer, self).to_native(obj) - ret["watchers"] = [user.email for user in obj.get_watchers()] - return ret - - -class HistoryExportSerializer(serializers.ModelSerializer): - user = HistoryUserField() - diff = HistoryDiffField(required=False) - snapshot = JsonField(required=False) - values = HistoryValuesField(required=False) - comment = CommentField(required=False) - delete_comment_date = serializers.DateTimeField(required=False) - delete_comment_user = HistoryUserField(required=False) - - class Meta: - model = history_models.HistoryEntry - exclude = ("id", "comment_html", "key") - - -class 
HistoryExportSerializerMixin(serializers.ModelSerializer): - history = serializers.SerializerMethodField("get_history") - - def get_history(self, obj): - history_qs = history_service.get_history_queryset_by_model_instance(obj, - types=(history_models.HistoryType.change, history_models.HistoryType.create,)) - - return HistoryExportSerializer(history_qs, many=True).data - - -class AttachmentExportSerializer(serializers.ModelSerializer): - owner = UserRelatedField(required=False) - attached_file = FileField() - modified_date = serializers.DateTimeField(required=False) - - class Meta: - model = attachments_models.Attachment - exclude = ('id', 'content_type', 'object_id', 'project') - - -class AttachmentExportSerializerMixin(serializers.ModelSerializer): - attachments = serializers.SerializerMethodField("get_attachments") - - def get_attachments(self, obj): - content_type = ContentType.objects.get_for_model(obj.__class__) - attachments_qs = attachments_models.Attachment.objects.filter(object_id=obj.pk, - content_type=content_type) - return AttachmentExportSerializer(attachments_qs, many=True).data +from .fields import (FileField, RelatedNoneSafeField, UserRelatedField, + UserPkField, CommentField, ProjectRelatedField, + HistoryUserField, HistoryValuesField, HistoryDiffField, + TimelineDataField) +from .mixins import (HistoryExportSerializerMixin, + AttachmentExportSerializerMixin, + CustomAttributesValuesExportSerializerMixin, + WatcheableObjectModelSerializerMixin) +from .cache import (_custom_tasks_attributes_cache, + _custom_userstories_attributes_cache, + _custom_issues_attributes_cache) class PointsExportSerializer(serializers.ModelSerializer): @@ -424,31 +124,6 @@ class IssueCustomAttributeExportSerializer(serializers.ModelSerializer): exclude = ('id', 'project') -class CustomAttributesValuesExportSerializerMixin(serializers.ModelSerializer): - custom_attributes_values = serializers.SerializerMethodField("get_custom_attributes_values") - - def custom_attributes_queryset(self, project): - raise NotImplementedError() - - def get_custom_attributes_values(self, obj): - def _use_name_instead_id_as_key_in_custom_attributes_values(custom_attributes, values): - ret = {} - for attr in custom_attributes: - value = values.get(str(attr["id"]), None) - if value is not None: - ret[attr["name"]] = value - - return ret - - try: - values = obj.custom_attributes_values.attributes_values - custom_attributes = self.custom_attributes_queryset(obj.project) - - return _use_name_instead_id_as_key_in_custom_attributes_values(custom_attributes, values) - except ObjectDoesNotExist: - return None - - class BaseCustomAttributesValuesExportSerializer(serializers.ModelSerializer): attributes_values = JsonField(source="attributes_values",required=True) _custom_attribute_model = None @@ -530,7 +205,7 @@ class RolePointsExportSerializer(serializers.ModelSerializer): exclude = ('id', 'user_story') -class MilestoneExportSerializer(WatcheableObjectModelSerializer): +class MilestoneExportSerializer(WatcheableObjectModelSerializerMixin): owner = UserRelatedField(required=False) modified_date = serializers.DateTimeField(required=False) estimated_start = serializers.DateField(required=False) @@ -559,7 +234,7 @@ class MilestoneExportSerializer(WatcheableObjectModelSerializer): class TaskExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin, - AttachmentExportSerializerMixin, WatcheableObjectModelSerializer): + AttachmentExportSerializerMixin, WatcheableObjectModelSerializerMixin): owner = 
UserRelatedField(required=False) status = ProjectRelatedField(slug_field="name") user_story = ProjectRelatedField(slug_field="ref", required=False) @@ -578,7 +253,7 @@ class TaskExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryE class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin, - AttachmentExportSerializerMixin, WatcheableObjectModelSerializer): + AttachmentExportSerializerMixin, WatcheableObjectModelSerializerMixin): role_points = RolePointsExportSerializer(many=True, required=False) owner = UserRelatedField(required=False) assigned_to = UserRelatedField(required=False) @@ -598,7 +273,7 @@ class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin, His class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin, - AttachmentExportSerializerMixin, WatcheableObjectModelSerializer): + AttachmentExportSerializerMixin, WatcheableObjectModelSerializerMixin): owner = UserRelatedField(required=False) status = ProjectRelatedField(slug_field="name") assigned_to = UserRelatedField(required=False) @@ -623,7 +298,7 @@ class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin, History class WikiPageExportSerializer(HistoryExportSerializerMixin, AttachmentExportSerializerMixin, - WatcheableObjectModelSerializer): + WatcheableObjectModelSerializerMixin): owner = UserRelatedField(required=False) last_modifier = UserRelatedField(required=False) modified_date = serializers.DateTimeField(required=False) @@ -640,31 +315,6 @@ class WikiLinkExportSerializer(serializers.ModelSerializer): -class TimelineDataField(serializers.WritableField): - read_only = False - - def to_native(self, data): - new_data = copy.deepcopy(data) - try: - user = cached_get_user_by_pk(new_data["user"]["id"]) - new_data["user"]["email"] = user.email - del new_data["user"]["id"] - except Exception: - pass - return new_data - - def from_native(self, data): - new_data = copy.deepcopy(data) - try: - user = cached_get_user_by_email(new_data["user"]["email"]) - new_data["user"]["id"] = user.id - del new_data["user"]["email"] - except users_models.User.DoesNotExist: - pass - - return new_data - - class TimelineExportSerializer(serializers.ModelSerializer): data = TimelineDataField() class Meta: @@ -672,7 +322,7 @@ class TimelineExportSerializer(serializers.ModelSerializer): exclude = ('id', 'project', 'namespace', 'object_id') -class ProjectExportSerializer(WatcheableObjectModelSerializer): +class ProjectExportSerializer(WatcheableObjectModelSerializerMixin): logo = FileField(required=False) anon_permissions = PgArrayField(required=False) public_permissions = PgArrayField(required=False) From 44d46ad47b87e9d11cd0ac70e4abd7c10da9527a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jes=C3=BAs=20Espino?= Date: Fri, 17 Jun 2016 11:41:58 +0200 Subject: [PATCH 4/5] Fixing timeline exportation --- taiga/export_import/serializers/fields.py | 16 ++++++++++++++++ taiga/export_import/serializers/serializers.py | 5 +++-- taiga/export_import/services/store.py | 3 ++- 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/taiga/export_import/serializers/fields.py b/taiga/export_import/serializers/fields.py index f2ca8841..64c01436 100644 --- a/taiga/export_import/serializers/fields.py +++ b/taiga/export_import/serializers/fields.py @@ -25,6 +25,7 @@ from django.core.files.base import ContentFile from django.core.exceptions import ObjectDoesNotExist from django.core.exceptions import ValidationError from 
django.utils.translation import ugettext as _ +from django.contrib.contenttypes.models import ContentType from taiga.base.api import serializers from taiga.base.fields import JsonField @@ -68,6 +69,21 @@ class FileField(serializers.WritableField): return ContentFile(decoded_data, name=data['name']) +class ContentTypeField(serializers.RelatedField): + read_only = False + + def to_native(self, obj): + if obj: + return [obj.app_label, obj.model] + return None + + def from_native(self, data): + try: + return ContentType.objects.get_by_natural_key(*data) + except Exception: + return None + + class RelatedNoneSafeField(serializers.RelatedField): def field_from_native(self, data, files, field_name, into): if self.read_only: diff --git a/taiga/export_import/serializers/serializers.py b/taiga/export_import/serializers/serializers.py index e7a2af76..7cf46cba 100644 --- a/taiga/export_import/serializers/serializers.py +++ b/taiga/export_import/serializers/serializers.py @@ -40,7 +40,7 @@ from taiga.projects.votes import services as votes_service from .fields import (FileField, RelatedNoneSafeField, UserRelatedField, UserPkField, CommentField, ProjectRelatedField, HistoryUserField, HistoryValuesField, HistoryDiffField, - TimelineDataField) + TimelineDataField, ContentTypeField) from .mixins import (HistoryExportSerializerMixin, AttachmentExportSerializerMixin, CustomAttributesValuesExportSerializerMixin, @@ -317,9 +317,10 @@ class WikiLinkExportSerializer(serializers.ModelSerializer): class TimelineExportSerializer(serializers.ModelSerializer): data = TimelineDataField() + data_content_type = ContentTypeField() class Meta: model = timeline_models.Timeline - exclude = ('id', 'project', 'namespace', 'object_id') + exclude = ('id', 'project', 'namespace', 'object_id', 'content_type') class ProjectExportSerializer(WatcheableObjectModelSerializerMixin): diff --git a/taiga/export_import/services/store.py b/taiga/export_import/services/store.py index c7888ce4..5d71c445 100644 --- a/taiga/export_import/services/store.py +++ b/taiga/export_import/services/store.py @@ -577,6 +577,7 @@ def _store_timeline_entry(project, timeline): serialized.object.project = project serialized.object.namespace = build_project_namespace(project) serialized.object.object_id = project.id + serialized.object.content_type = ContentType.objects.get_for_model(project.__class__) serialized.object._importing = True serialized.save() return serialized @@ -725,7 +726,7 @@ def store_project_from_dict(data, owner=None): except err.TaigaImportError: # reraise known inport errors raise - except: + except Exception: # reise unknown errors as import error raise err.TaigaImportError(_("unexpected error importing project"), project) From 773ab631064a155983fe173a5d45af336472aa3b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jes=C3=BAs=20Espino?= Date: Mon, 20 Jun 2016 12:24:34 +0200 Subject: [PATCH 5/5] Adding dump_project_async command --- .../management/commands/dump_project_async.py | 82 +++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 taiga/export_import/management/commands/dump_project_async.py diff --git a/taiga/export_import/management/commands/dump_project_async.py b/taiga/export_import/management/commands/dump_project_async.py new file mode 100644 index 00000000..d48a0c19 --- /dev/null +++ b/taiga/export_import/management/commands/dump_project_async.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# 
Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +from django.core.management.base import BaseCommand, CommandError +from django.db.models import Q +from django.conf import settings + +from taiga.projects.models import Project +from taiga.users.models import User +from taiga.permissions.services import is_project_admin +from taiga.export_import import tasks + + +class Command(BaseCommand): + help = "Export projects to a json file" + + def add_arguments(self, parser): + parser.add_argument("project_slugs", + nargs="+", + help="") + + parser.add_argument("-u", "--user", + action="store", + dest="user", + default="./", + metavar="DIR", + required=True, + help="Dump as user by email or username.") + + parser.add_argument("-f", "--format", + action="store", + dest="format", + default="plain", + metavar="[plain|gzip]", + help="Format to the output file plain json or gzipped json. ('plain' by default)") + + def handle(self, *args, **options): + username_or_email = options["user"] + dump_format = options["format"] + project_slugs = options["project_slugs"] + + try: + user = User.objects.get(Q(username=username_or_email) | Q(email=username_or_email)) + except Exception: + raise CommandError("Error loading user".format(username_or_email)) + + for project_slug in project_slugs: + try: + project = Project.objects.get(slug=project_slug) + except Project.DoesNotExist: + raise CommandError("Project '{}' does not exist".format(project_slug)) + + if not is_project_admin(user, project): + self.stderr.write(self.style.ERROR( + "ERROR: Not sending task because user '{}' doesn't have permissions to export '{}' project".format( + username_or_email, + project_slug + ) + )) + continue + + task = tasks.dump_project.delay(user, project, dump_format) + tasks.delete_project_dump.apply_async( + (project.pk, project.slug, task.id, dump_format), + countdown=settings.EXPORTS_TTL + ) + print("-> Sent task for dump of project '{}' as user {}".format(project.name, username_or_email))
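
Taken together, the series trades a little memory for far fewer queries (module-level caches for users and custom-attribute definitions, plus select_related/prefetch_related on the exported querysets) and moves the dump pipeline to a byte-oriented writer so the same render_project call can be wrapped in gzip.GzipFile from the API view, the Celery task, and the management commands. The following is a minimal, self-contained sketch of those two ideas; cached_user, fetch_user, render and dump_project_gzip are illustrative stand-ins, not Taiga APIs.

import gzip
import io

# Stand-in cache mirroring the module-level dictionaries added in PATCH 1/5
# (_cache_user_by_email and friends); fetch_user is a placeholder for the
# Django ORM lookup and is not part of the patch.
_user_cache = {}


def cached_user(email, fetch_user):
    # Look each user up once per export run; later history/timeline entries
    # that reference the same email reuse the cached object instead of
    # issuing another query.
    if email not in _user_cache:
        _user_cache[email] = fetch_user(email)
    return _user_cache[email]


def dump_project_gzip(render, project, raw_outfile):
    # Wrap the raw output file in GzipFile, as the API view and Celery task
    # do when dump_format == "gzip"; render() must write bytes, which is why
    # the patched render_project emits b'...' chunks and encodes strings.
    with gzip.GzipFile(fileobj=raw_outfile, mode="wb") as gz:
        render(project, gz)


if __name__ == "__main__":
    buf = io.BytesIO()
    dump_project_gzip(lambda proj, out: out.write(b'{"slug": "demo"}\n'), None, buf)
    assert gzip.decompress(buf.getvalue()) == b'{"slug": "demo"}\n'
    print(cached_user("demo@example.com", lambda email: {"email": email}))

One consequence of module-level dictionaries is that they live for the lifetime of the process, so a long-running Celery worker can serve stale user or attribute data across exports unless the caches are cleared between runs.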