diff --git a/CHANGELOG.md b/CHANGELOG.md index 24145d95..7630f9ca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,8 @@ ## 2.1.0 ??? (unreleased) ### Features +- Add sprint name and slug on search results for user stories ((thanks to [@everblut](https://github.com/everblut))) +- [API] projects resource: Random order if `discover_mode=true` and `is_featured=true`. - Webhooks: Improve webhook data: - add permalinks - owner, assigned_to, status, type, priority, severity, user_story, milestone, project are objects @@ -15,8 +17,9 @@ - CSV Reports: - Change field name: 'milestone' to 'sprint' - Add new fields: 'sprint_estimated_start' and 'sprint_estimated_end' -- Add sprint name and slug on search results for user stories ((thanks to [@everblut](https://github.com/everblut))) -- [API] projects resource: Random order if `discover_mode=true` and `is_featured=true`. +- Importer: + - Remove the project if loading a dump file fails + - Add more info to the logger if loading a dump file fails ### Misc - Lots of small and not so small bugfixes. 
diff --git a/settings/local.py.example b/settings/local.py.example index e1bd9383..09e53ca4 100644 --- a/settings/local.py.example +++ b/settings/local.py.example @@ -96,3 +96,11 @@ DATABASES = { # If is True /front/sitemap.xml show a valid sitemap of taiga-front client #FRONT_SITEMAP_ENABLED = False #FRONT_SITEMAP_CACHE_TIMEOUT = 24*60*60 # In second + +# CELERY +#from .celery import * +#CELERY_ENABLED = True +# +# To use celery in memory +#CELERY_ENABLED = True +#CELERY_ALWAYS_EAGER = True diff --git a/settings/testing.py b/settings/testing.py index b1549a8a..6862a5b1 100644 --- a/settings/testing.py +++ b/settings/testing.py @@ -17,10 +17,8 @@ from .development import * -SKIP_SOUTH_TESTS = True -SOUTH_TESTS_MIGRATE = False -CELERY_ALWAYS_EAGER = True CELERY_ENABLED = False +CELERY_ALWAYS_EAGER = True MEDIA_ROOT = "/tmp" diff --git a/taiga/export_import/api.py b/taiga/export_import/api.py index 5b570980..f84e263f 100644 --- a/taiga/export_import/api.py +++ b/taiga/export_import/api.py @@ -36,14 +36,14 @@ from taiga.projects.models import Project, Membership from taiga.projects.issues.models import Issue from taiga.projects.tasks.models import Task from taiga.projects.serializers import ProjectSerializer -from taiga.users import services as users_service +from taiga.users import services as users_services +from . import exceptions as err from . import mixins -from . import serializers -from . import service from . import permissions +from . import serializers +from . import services from . import tasks -from . import dump_service from . 
import throttling from .renderers import ExportRenderer @@ -72,7 +72,7 @@ class ProjectExporterViewSet(mixins.ImportThrottlingPolicyMixin, GenericViewSet) path = "exports/{}/{}-{}.json".format(project.pk, project.slug, uuid.uuid4().hex) storage_path = default_storage.path(path) with default_storage.open(storage_path, mode="w") as outfile: - service.render_project(project, outfile) + services.render_project(project, outfile) response_data = { "url": default_storage.url(path) @@ -96,7 +96,7 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi total_memberships = len([m for m in data.get("memberships", []) if m.get("email", None) != data["owner"]]) total_memberships = total_memberships + 1 # 1 is the owner - (enough_slots, error_message) = users_service.has_available_slot_for_import_new_project( + (enough_slots, error_message) = users_services.has_available_slot_for_import_new_project( self.request.user, is_private, total_memberships @@ -105,22 +105,22 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi raise exc.NotEnoughSlotsForProject(is_private, total_memberships, error_message) # Create Project - project_serialized = service.store_project(data) + project_serialized = services.store.store_project(data) if not project_serialized: - raise exc.BadRequest(service.get_errors()) + raise exc.BadRequest(services.store.get_errors()) # Create roles roles_serialized = None if "roles" in data: - roles_serialized = service.store_roles(project_serialized.object, data) + roles_serialized = services.store.store_roles(project_serialized.object, data) if not roles_serialized: raise exc.BadRequest(_("We needed at least one role")) # Create memberships if "memberships" in data: - service.store_memberships(project_serialized.object, data) + services.store.store_memberships(project_serialized.object, data) try: owner_membership = project_serialized.object.memberships.get(user=project_serialized.object.owner) @@ -137,57 
+137,57 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi # Create project values choicess if "points" in data: - service.store_choices(project_serialized.object, data, - "points", serializers.PointsExportSerializer) + services.store.store_project_attributes_values(project_serialized.object, data, + "points", serializers.PointsExportSerializer) if "issue_types" in data: - service.store_choices(project_serialized.object, data, - "issue_types", - serializers.IssueTypeExportSerializer) + services.store.store_project_attributes_values(project_serialized.object, data, + "issue_types", + serializers.IssueTypeExportSerializer) if "issue_statuses" in data: - service.store_choices(project_serialized.object, data, - "issue_statuses", - serializers.IssueStatusExportSerializer,) + services.store.store_project_attributes_values(project_serialized.object, data, + "issue_statuses", + serializers.IssueStatusExportSerializer,) if "us_statuses" in data: - service.store_choices(project_serialized.object, data, - "us_statuses", - serializers.UserStoryStatusExportSerializer,) + services.store.store_project_attributes_values(project_serialized.object, data, + "us_statuses", + serializers.UserStoryStatusExportSerializer,) if "task_statuses" in data: - service.store_choices(project_serialized.object, data, - "task_statuses", - serializers.TaskStatusExportSerializer) + services.store.store_project_attributes_values(project_serialized.object, data, + "task_statuses", + serializers.TaskStatusExportSerializer) if "priorities" in data: - service.store_choices(project_serialized.object, data, - "priorities", - serializers.PriorityExportSerializer) + services.store.store_project_attributes_values(project_serialized.object, data, + "priorities", + serializers.PriorityExportSerializer) if "severities" in data: - service.store_choices(project_serialized.object, data, - "severities", - serializers.SeverityExportSerializer) + 
services.store.store_project_attributes_values(project_serialized.object, data, + "severities", + serializers.SeverityExportSerializer) if ("points" in data or "issues_types" in data or "issues_statuses" in data or "us_statuses" in data or "task_statuses" in data or "priorities" in data or "severities" in data): - service.store_default_choices(project_serialized.object, data) + services.store.store_default_project_attributes_values(project_serialized.object, data) # Created custom attributes if "userstorycustomattributes" in data: - service.store_custom_attributes(project_serialized.object, data, - "userstorycustomattributes", - serializers.UserStoryCustomAttributeExportSerializer) + services.store.store_custom_attributes(project_serialized.object, data, + "userstorycustomattributes", + serializers.UserStoryCustomAttributeExportSerializer) if "taskcustomattributes" in data: - service.store_custom_attributes(project_serialized.object, data, - "taskcustomattributes", - serializers.TaskCustomAttributeExportSerializer) + services.store.store_custom_attributes(project_serialized.object, data, + "taskcustomattributes", + serializers.TaskCustomAttributeExportSerializer) if "issuecustomattributes" in data: - service.store_custom_attributes(project_serialized.object, data, - "issuecustomattributes", - serializers.IssueCustomAttributeExportSerializer) + services.store.store_custom_attributes(project_serialized.object, data, + "issuecustomattributes", + serializers.IssueCustomAttributeExportSerializer) # Is there any error? 
- errors = service.get_errors() + errors = services.store.get_errors() if errors: raise exc.BadRequest(errors) @@ -199,21 +199,33 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi @detail_route(methods=['post']) @method_decorator(atomic) - def issue(self, request, *args, **kwargs): + def milestone(self, request, *args, **kwargs): project = self.get_object_or_none() self.check_permissions(request, 'import_item', project) - signals.pre_save.disconnect(sender=Issue, - dispatch_uid="set_finished_date_when_edit_issue") + milestone = services.store.store_milestone(project, request.DATA.copy()) - issue = service.store_issue(project, request.DATA.copy()) - - errors = service.get_errors() + errors = services.store.get_errors() if errors: raise exc.BadRequest(errors) - headers = self.get_success_headers(issue.data) - return response.Created(issue.data, headers=headers) + headers = self.get_success_headers(milestone.data) + return response.Created(milestone.data, headers=headers) + + @detail_route(methods=['post']) + @method_decorator(atomic) + def us(self, request, *args, **kwargs): + project = self.get_object_or_none() + self.check_permissions(request, 'import_item', project) + + us = services.store.store_user_story(project, request.DATA.copy()) + + errors = services.store.get_errors() + if errors: + raise exc.BadRequest(errors) + + headers = self.get_success_headers(us.data) + return response.Created(us.data, headers=headers) @detail_route(methods=['post']) @method_decorator(atomic) @@ -224,9 +236,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi signals.pre_save.disconnect(sender=Task, dispatch_uid="set_finished_date_when_edit_task") - task = service.store_task(project, request.DATA.copy()) + task = services.store.store_task(project, request.DATA.copy()) - errors = service.get_errors() + errors = services.store.get_errors() if errors: raise exc.BadRequest(errors) @@ -235,33 +247,21 @@ class 
ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi @detail_route(methods=['post']) @method_decorator(atomic) - def us(self, request, *args, **kwargs): + def issue(self, request, *args, **kwargs): project = self.get_object_or_none() self.check_permissions(request, 'import_item', project) - us = service.store_user_story(project, request.DATA.copy()) + signals.pre_save.disconnect(sender=Issue, + dispatch_uid="set_finished_date_when_edit_issue") - errors = service.get_errors() + issue = services.store.store_issue(project, request.DATA.copy()) + + errors = services.store.get_errors() if errors: raise exc.BadRequest(errors) - headers = self.get_success_headers(us.data) - return response.Created(us.data, headers=headers) - - @detail_route(methods=['post']) - @method_decorator(atomic) - def milestone(self, request, *args, **kwargs): - project = self.get_object_or_none() - self.check_permissions(request, 'import_item', project) - - milestone = service.store_milestone(project, request.DATA.copy()) - - errors = service.get_errors() - if errors: - raise exc.BadRequest(errors) - - headers = self.get_success_headers(milestone.data) - return response.Created(milestone.data, headers=headers) + headers = self.get_success_headers(issue.data) + return response.Created(issue.data, headers=headers) @detail_route(methods=['post']) @method_decorator(atomic) @@ -269,9 +269,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi project = self.get_object_or_none() self.check_permissions(request, 'import_item', project) - wiki_page = service.store_wiki_page(project, request.DATA.copy()) + wiki_page = services.store.store_wiki_page(project, request.DATA.copy()) - errors = service.get_errors() + errors = services.store.get_errors() if errors: raise exc.BadRequest(errors) @@ -284,9 +284,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi project = self.get_object_or_none() self.check_permissions(request, 
'import_item', project) - wiki_link = service.store_wiki_link(project, request.DATA.copy()) + wiki_link = services.store.store_wiki_link(project, request.DATA.copy()) - errors = service.get_errors() + errors = services.store.get_errors() if errors: raise exc.BadRequest(errors) @@ -327,7 +327,7 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi total_memberships = len([m for m in dump.get("memberships", []) if m.get("email", None) != dump["owner"]]) total_memberships = total_memberships + 1 # 1 is the owner - (enough_slots, error_message) = users_service.has_available_slot_for_import_new_project( + (enough_slots, error_message) = users_services.has_available_slot_for_import_new_project( user, is_private, total_memberships @@ -335,11 +335,23 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi if not enough_slots: raise exc.NotEnoughSlotsForProject(is_private, total_memberships, error_message) + # Async mode if settings.CELERY_ENABLED: task = tasks.load_project_dump.delay(user, dump) return response.Accepted({"import_id": task.id}) - project = dump_service.dict_to_project(dump, request.user) - response_data = ProjectSerializer(project).data - return response.Created(response_data) + # Sync mode + try: + project = services.store_project_from_dict(dump, request.user) + except err.TaigaImportError as e: + # On Error + ## remove project + if e.project: + e.project.delete_related_content() + e.project.delete() + return response.BadRequest({"error": e.message, "details": e.errors}) + else: + # On Success + response_data = ProjectSerializer(project).data + return response.Created(response_data) diff --git a/taiga/export_import/dump_service.py b/taiga/export_import/dump_service.py deleted file mode 100644 index 243b9167..00000000 --- a/taiga/export_import/dump_service.py +++ /dev/null @@ -1,202 +0,0 @@ -# Copyright (C) 2014-2016 Andrey Antukh -# Copyright (C) 2014-2016 Jesús Espino -# Copyright (C) 2014-2016 
David Barragán -# Copyright (C) 2014-2016 Alejandro Alonso -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as -# published by the Free Software Foundation, either version 3 of the -# License, or (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -from django.utils.decorators import method_decorator -from django.utils.translation import ugettext as _ - -from taiga.projects.models import Membership, Project -from taiga.users import services as users_service - -from . import serializers -from . import service - - -class TaigaImportError(Exception): - def __init__(self, message): - self.message = message - - -def store_milestones(project, data): - results = [] - for milestone_data in data.get("milestones", []): - milestone = service.store_milestone(project, milestone_data) - results.append(milestone) - return results - - -def store_tasks(project, data): - results = [] - for task in data.get("tasks", []): - task = service.store_task(project, task) - results.append(task) - return results - - -def store_wiki_pages(project, data): - results = [] - for wiki_page in data.get("wiki_pages", []): - results.append(service.store_wiki_page(project, wiki_page)) - return results - - -def store_wiki_links(project, data): - results = [] - for wiki_link in data.get("wiki_links", []): - results.append(service.store_wiki_link(project, wiki_link)) - return results - - -def store_user_stories(project, data): - results = [] - for userstory in data.get("user_stories", []): - us = service.store_user_story(project, userstory) - results.append(us) - 
return results - - -def store_timeline_entries(project, data): - results = [] - for timeline in data.get("timeline", []): - tl = service.store_timeline_entry(project, timeline) - results.append(tl) - return results - - -def store_issues(project, data): - issues = [] - for issue in data.get("issues", []): - issues.append(service.store_issue(project, issue)) - return issues - - -def store_tags_colors(project, data): - project.tags_colors = data.get("tags_colors", []) - project.save() - return None - - -def dict_to_project(data, owner=None): - if owner: - data["owner"] = owner.email - - # Validate if the owner can have this project - is_private = data.get("is_private", False) - total_memberships = len([m for m in data.get("memberships", []) - if m.get("email", None) != data["owner"]]) - total_memberships = total_memberships + 1 # 1 is the owner - (enough_slots, error_message) = users_service.has_available_slot_for_import_new_project( - owner, - is_private, - total_memberships - ) - if not enough_slots: - raise TaigaImportError(error_message) - - project_serialized = service.store_project(data) - - if not project_serialized: - raise TaigaImportError(_("error importing project data")) - - proj = project_serialized.object - - service.store_choices(proj, data, "points", serializers.PointsExportSerializer) - service.store_choices(proj, data, "issue_types", serializers.IssueTypeExportSerializer) - service.store_choices(proj, data, "issue_statuses", serializers.IssueStatusExportSerializer) - service.store_choices(proj, data, "us_statuses", serializers.UserStoryStatusExportSerializer) - service.store_choices(proj, data, "task_statuses", serializers.TaskStatusExportSerializer) - service.store_choices(proj, data, "priorities", serializers.PriorityExportSerializer) - service.store_choices(proj, data, "severities", serializers.SeverityExportSerializer) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing lists of project attributes")) - - 
service.store_default_choices(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing default project attributes values")) - - service.store_custom_attributes(proj, data, "userstorycustomattributes", - serializers.UserStoryCustomAttributeExportSerializer) - service.store_custom_attributes(proj, data, "taskcustomattributes", - serializers.TaskCustomAttributeExportSerializer) - service.store_custom_attributes(proj, data, "issuecustomattributes", - serializers.IssueCustomAttributeExportSerializer) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing custom attributes")) - - service.store_roles(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing roles")) - - service.store_memberships(proj, data) - - if proj.memberships.filter(user=proj.owner).count() == 0: - if proj.roles.all().count() > 0: - Membership.objects.create( - project=proj, - email=proj.owner.email, - user=proj.owner, - role=proj.roles.all().first(), - is_admin=True - ) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing memberships")) - - store_milestones(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing sprints")) - - store_wiki_pages(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing wiki pages")) - - store_wiki_links(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing wiki links")) - - store_issues(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing issues")) - - store_user_stories(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing user stories")) - - store_tasks(proj, data) - - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing tasks")) - - store_tags_colors(proj, data) - - if service.get_errors(clear=False): - 
raise TaigaImportError(_("error importing tags")) - - store_timeline_entries(proj, data) - if service.get_errors(clear=False): - raise TaigaImportError(_("error importing timelines")) - - proj.refresh_totals() - return proj diff --git a/taiga/export_import/exceptions.py b/taiga/export_import/exceptions.py new file mode 100644 index 00000000..623d5b24 --- /dev/null +++ b/taiga/export_import/exceptions.py @@ -0,0 +1,23 @@ +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+ + +class TaigaImportError(Exception): + def __init__(self, message, project, errors=None): + self.message = message + self.project = project + self.errors = errors if errors is not None else [] diff --git a/taiga/export_import/management/commands/dump_project.py b/taiga/export_import/management/commands/dump_project.py index daf6f6d5..dc59a17b 100644 --- a/taiga/export_import/management/commands/dump_project.py +++ b/taiga/export_import/management/commands/dump_project.py @@ -19,7 +19,7 @@ from django.core.management.base import BaseCommand, CommandError from taiga.projects.models import Project from taiga.export_import.renderers import ExportRenderer -from taiga.export_import.service import render_project +from taiga.export_import.services import render_project import resource diff --git a/taiga/export_import/management/commands/load_dump.py b/taiga/export_import/management/commands/load_dump.py index 367a2401..a1d919f0 100644 --- a/taiga/export_import/management/commands/load_dump.py +++ b/taiga/export_import/management/commands/load_dump.py @@ -21,10 +21,10 @@ from django.db.models import signals from optparse import make_option from taiga.base.utils import json -from taiga.projects.models import Project +from taiga.export_import import services +from taiga.export_import import exceptions as err from taiga.export_import.renderers import ExportRenderer -from taiga.export_import.dump_service import dict_to_project, TaigaImportError -from taiga.export_import.service import get_errors +from taiga.projects.models import Project from taiga.users.models import User @@ -61,8 +61,12 @@ class Command(BaseCommand): signals.post_delete.receivers = receivers_back user = User.objects.get(email=args[1]) - dict_to_project(data, user) - except TaigaImportError as e: + services.store_project_from_dict(data, user) + except err.TaigaImportError as e: + if e.project: + e.project.delete_related_content() + e.project.delete() + print("ERROR:", end=" ") print(e.message) - print(get_errors()) + 
print(services.store.get_errors()) diff --git a/taiga/export_import/services/__init__.py b/taiga/export_import/services/__init__.py new file mode 100644 index 00000000..8aad0f08 --- /dev/null +++ b/taiga/export_import/services/__init__.py @@ -0,0 +1,26 @@ +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +# This makes all code that import services works and +# is not the baddest practice ;) + +from .render import render_project +from . import render + +from .store import store_project_from_dict +from . import store + diff --git a/taiga/export_import/services/render.py b/taiga/export_import/services/render.py new file mode 100644 index 00000000..b9905baf --- /dev/null +++ b/taiga/export_import/services/render.py @@ -0,0 +1,124 @@ +# Copyright (C) 2014-2016 Andrey Antukh +# Copyright (C) 2014-2016 Jesús Espino +# Copyright (C) 2014-2016 David Barragán +# Copyright (C) 2014-2016 Alejandro Alonso +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as +# published by the Free Software Foundation, either version 3 of the +# License, or (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +# This makes all code that import services works and +# is not the baddest practice ;) + +import base64 +import gc +import os + +from django.core.files.storage import default_storage + +from taiga.base.utils import json +from taiga.timeline.service import get_project_timeline +from taiga.base.api.fields import get_component + +from .. import serializers + + +def render_project(project, outfile, chunk_size = 8190): + serializer = serializers.ProjectExportSerializer(project) + outfile.write('{\n') + + first_field = True + for field_name in serializer.fields.keys(): + # Avoid writing "," in the last element + if not first_field: + outfile.write(",\n") + else: + first_field = False + + field = serializer.fields.get(field_name) + field.initialize(parent=serializer, field_name=field_name) + + # These four "special" fields hava attachments so we use them in a special way + if field_name in ["wiki_pages", "user_stories", "tasks", "issues"]: + value = get_component(project, field_name) + outfile.write('"{}": [\n'.format(field_name)) + + attachments_field = field.fields.pop("attachments", None) + if attachments_field: + attachments_field.initialize(parent=field, field_name="attachments") + + first_item = True + for item in value.iterator(): + # Avoid writing "," in the last element + if not first_item: + outfile.write(",\n") + else: + first_item = False + + + dumped_value = json.dumps(field.to_native(item)) + writing_value = dumped_value[:-1]+ ',\n "attachments": [\n' + outfile.write(writing_value) + + first_attachment = True + for attachment in item.attachments.iterator(): + # Avoid writing "," in the 
last element + if not first_attachment: + outfile.write(",\n") + else: + first_attachment = False + + # Write all the data expect the serialized file + attachment_serializer = serializers.AttachmentExportSerializer(instance=attachment) + attached_file_serializer = attachment_serializer.fields.pop("attached_file") + dumped_value = json.dumps(attachment_serializer.data) + dumped_value = dumped_value[:-1] + ',\n "attached_file":{\n "data":"' + outfile.write(dumped_value) + + # We write the attached_files by chunks so the memory used is not increased + attachment_file = attachment.attached_file + if default_storage.exists(attachment_file.name): + with default_storage.open(attachment_file.name) as f: + while True: + bin_data = f.read(chunk_size) + if not bin_data: + break + + b64_data = base64.b64encode(bin_data).decode('utf-8') + outfile.write(b64_data) + + outfile.write('", \n "name":"{}"}}\n}}'.format( + os.path.basename(attachment_file.name))) + + outfile.write(']}') + outfile.flush() + gc.collect() + outfile.write(']') + + else: + value = field.field_to_native(project, field_name) + outfile.write('"{}": {}'.format(field_name, json.dumps(value))) + + # Generate the timeline + outfile.write(',\n"timeline": [\n') + first_timeline = True + for timeline_item in get_project_timeline(project).iterator(): + # Avoid writing "," in the last element + if not first_timeline: + outfile.write(",\n") + else: + first_timeline = False + + dumped_value = json.dumps(serializers.TimelineExportSerializer(timeline_item).data) + outfile.write(dumped_value) + + outfile.write(']}\n') + diff --git a/taiga/export_import/service.py b/taiga/export_import/services/store.py similarity index 63% rename from taiga/export_import/service.py rename to taiga/export_import/services/store.py index 14ecd22d..e286c97c 100644 --- a/taiga/export_import/service.py +++ b/taiga/export_import/services/store.py @@ -15,30 +15,35 @@ # You should have received a copy of the GNU Affero General Public License # along 
with this program. If not, see . -import base64 -import gc -import resource +# This makes all code that import services works and +# is not the baddest practice ;) + import os -import os.path as path import uuid from unidecode import unidecode -from django.template.defaultfilters import slugify from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ObjectDoesNotExist -from django.core.files.storage import default_storage +from django.template.defaultfilters import slugify +from django.utils.translation import ugettext as _ -from taiga.base.utils import json from taiga.projects.history.services import make_key_from_model_object, take_snapshot -from taiga.timeline.service import build_project_namespace, get_project_timeline +from taiga.projects.models import Membership from taiga.projects.references import sequences as seq from taiga.projects.references import models as refs from taiga.projects.userstories.models import RolePoints from taiga.projects.services import find_invited_user -from taiga.base.api.fields import get_component +from taiga.timeline.service import build_project_namespace +from taiga.users import services as users_service -from . import serializers +from .. import exceptions as err +from .. 
import serializers + + +######################################################################## +## Manage errors +######################################################################## _errors_log = {} @@ -57,97 +62,16 @@ def add_errors(section, errors): _errors_log[section] = [errors] -def render_project(project, outfile, chunk_size = 8190): - serializer = serializers.ProjectExportSerializer(project) - outfile.write('{\n') - - first_field = True - for field_name in serializer.fields.keys(): - # Avoid writing "," in the last element - if not first_field: - outfile.write(",\n") - else: - first_field = False - - field = serializer.fields.get(field_name) - field.initialize(parent=serializer, field_name=field_name) - - # These four "special" fields hava attachments so we use them in a special way - if field_name in ["wiki_pages", "user_stories", "tasks", "issues"]: - value = get_component(project, field_name) - outfile.write('"{}": [\n'.format(field_name)) - - attachments_field = field.fields.pop("attachments", None) - if attachments_field: - attachments_field.initialize(parent=field, field_name="attachments") - - first_item = True - for item in value.iterator(): - # Avoid writing "," in the last element - if not first_item: - outfile.write(",\n") - else: - first_item = False +def reset_errors(): + _errors_log.clear() - dumped_value = json.dumps(field.to_native(item)) - writing_value = dumped_value[:-1]+ ',\n "attachments": [\n' - outfile.write(writing_value) +######################################################################## +## Store functions +######################################################################## - first_attachment = True - for attachment in item.attachments.iterator(): - # Avoid writing "," in the last element - if not first_attachment: - outfile.write(",\n") - else: - first_attachment = False - - # Write all the data expect the serialized file - attachment_serializer = serializers.AttachmentExportSerializer(instance=attachment) - 
attached_file_serializer = attachment_serializer.fields.pop("attached_file") - dumped_value = json.dumps(attachment_serializer.data) - dumped_value = dumped_value[:-1] + ',\n "attached_file":{\n "data":"' - outfile.write(dumped_value) - - # We write the attached_files by chunks so the memory used is not increased - attachment_file = attachment.attached_file - if default_storage.exists(attachment_file.name): - with default_storage.open(attachment_file.name) as f: - while True: - bin_data = f.read(chunk_size) - if not bin_data: - break - - b64_data = base64.b64encode(bin_data).decode('utf-8') - outfile.write(b64_data) - - outfile.write('", \n "name":"{}"}}\n}}'.format( - os.path.basename(attachment_file.name))) - - outfile.write(']}') - outfile.flush() - gc.collect() - outfile.write(']') - - else: - value = field.field_to_native(project, field_name) - outfile.write('"{}": {}'.format(field_name, json.dumps(value))) - - # Generate the timeline - outfile.write(',\n"timeline": [\n') - first_timeline = True - for timeline_item in get_project_timeline(project).iterator(): - # Avoid writing "," in the last element - if not first_timeline: - outfile.write(",\n") - else: - first_timeline = False - - dumped_value = json.dumps(serializers.TimelineExportSerializer(timeline_item).data) - outfile.write(dumped_value) - - outfile.write(']}\n') +## PROJECT def store_project(data): project_data = {} @@ -175,43 +99,19 @@ def store_project(data): return None -def _store_choice(project, data, field, serializer): - serialized = serializer(data=data) - if serialized.is_valid(): - serialized.object.project = project - serialized.object._importing = True - serialized.save() - return serialized.object - add_errors(field, serialized.errors) - return None +## MISC + +def _use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes, values): + ret = {} + for attr in custom_attributes: + value = values.get(attr["name"], None) + if value is not None: + ret[str(attr["id"])] = value + + 
return ret -def store_choices(project, data, field, serializer): - result = [] - for choice_data in data.get(field, []): - result.append(_store_choice(project, choice_data, field, serializer)) - return result - - -def _store_custom_attribute(project, data, field, serializer): - serialized = serializer(data=data) - if serialized.is_valid(): - serialized.object.project = project - serialized.object._importing = True - serialized.save() - return serialized.object - add_errors(field, serialized.errors) - return None - - -def store_custom_attributes(project, data, field, serializer): - result = [] - for custom_attribute_data in data.get(field, []): - result.append(_store_custom_attribute(project, custom_attribute_data, field, serializer)) - return result - - -def store_custom_attributes_values(obj, data_values, obj_field, serializer_class): +def _store_custom_attributes_values(obj, data_values, obj_field, serializer_class): data = { obj_field: obj.id, "attributes_values": data_values, @@ -231,17 +131,39 @@ def store_custom_attributes_values(obj, data_values, obj_field, serializer_class return None -def _use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes, values): - ret = {} - for attr in custom_attributes: - value = values.get(attr["name"], None) - if value is not None: - ret[str(attr["id"])] = value - - return ret +def _store_attachment(project, obj, attachment): + serialized = serializers.AttachmentExportSerializer(data=attachment) + if serialized.is_valid(): + serialized.object.content_type = ContentType.objects.get_for_model(obj.__class__) + serialized.object.object_id = obj.id + serialized.object.project = project + if serialized.object.owner is None: + serialized.object.owner = serialized.object.project.owner + serialized.object._importing = True + serialized.object.size = serialized.object.attached_file.size + serialized.object.name = os.path.basename(serialized.object.attached_file.name) + serialized.save() + return serialized + 
add_errors("attachments", serialized.errors) + return serialized -def store_role(project, role): +def _store_history(project, obj, history): + serialized = serializers.HistoryExportSerializer(data=history, context={"project": project}) + if serialized.is_valid(): + serialized.object.key = make_key_from_model_object(obj) + if serialized.object.diff is None: + serialized.object.diff = [] + serialized.object._importing = True + serialized.save() + return serialized + add_errors("history", serialized.errors) + return serialized + + +## ROLES + +def _store_role(project, role): serialized = serializers.RoleExportSerializer(data=role) if serialized.is_valid(): serialized.object.project = project @@ -255,14 +177,60 @@ def store_role(project, role): def store_roles(project, data): results = [] for role in data.get("roles", []): - serialized = store_role(project, role) + serialized = _store_role(project, role) if serialized: results.append(serialized) return results -def store_default_choices(project, data): +## MEMGERSHIPS + +def _store_membership(project, membership): + serialized = serializers.MembershipExportSerializer(data=membership, context={"project": project}) + if serialized.is_valid(): + serialized.object.project = project + serialized.object._importing = True + serialized.object.token = str(uuid.uuid1()) + serialized.object.user = find_invited_user(serialized.object.email, + default=serialized.object.user) + serialized.save() + return serialized + + add_errors("memberships", serialized.errors) + return None + + +def store_memberships(project, data): + results = [] + for membership in data.get("memberships", []): + results.append(_store_membership(project, membership)) + return results + + +## PROJECT ATTRIBUTES + +def _store_project_attribute_value(project, data, field, serializer): + serialized = serializer(data=data) + if serialized.is_valid(): + serialized.object.project = project + serialized.object._importing = True + serialized.save() + return 
serialized.object + add_errors(field, serialized.errors) + return None + + +def store_project_attributes_values(project, data, field, serializer): + result = [] + for choice_data in data.get(field, []): + result.append(_store_project_attribute_value(project, choice_data, field, serializer)) + return result + + +## DEFAULT PROJECT ATTRIBUTES VALUES + +def store_default_project_attributes_values(project, data): def helper(project, field, related, data): if field in data: value = related.all().get(name=data[field]) @@ -281,75 +249,27 @@ def store_default_choices(project, data): project.save() -def store_membership(project, membership): - serialized = serializers.MembershipExportSerializer(data=membership, context={"project": project}) +## CUSTOM ATTRIBUTES + +def _store_custom_attribute(project, data, field, serializer): + serialized = serializer(data=data) if serialized.is_valid(): serialized.object.project = project serialized.object._importing = True - serialized.object.token = str(uuid.uuid1()) - serialized.object.user = find_invited_user(serialized.object.email, - default=serialized.object.user) serialized.save() - return serialized - - add_errors("memberships", serialized.errors) + return serialized.object + add_errors(field, serialized.errors) return None -def store_memberships(project, data): - results = [] - for membership in data.get("memberships", []): - results.append(store_membership(project, membership)) - return results +def store_custom_attributes(project, data, field, serializer): + result = [] + for custom_attribute_data in data.get(field, []): + result.append(_store_custom_attribute(project, custom_attribute_data, field, serializer)) + return result -def store_task(project, data): - if "status" not in data and project.default_task_status: - data["status"] = project.default_task_status.name - - serialized = serializers.TaskExportSerializer(data=data, context={"project": project}) - if serialized.is_valid(): - serialized.object.project = project - if 
serialized.object.owner is None: - serialized.object.owner = serialized.object.project.owner - serialized.object._importing = True - serialized.object._not_notify = True - - serialized.save() - serialized.save_watchers() - - if serialized.object.ref: - sequence_name = refs.make_sequence_name(project) - if not seq.exists(sequence_name): - seq.create(sequence_name) - seq.set_max(sequence_name, serialized.object.ref) - else: - serialized.object.ref, _ = refs.make_reference(serialized.object, project) - serialized.object.save() - - for task_attachment in data.get("attachments", []): - store_attachment(project, serialized.object, task_attachment) - - history_entries = data.get("history", []) - for history in history_entries: - store_history(project, serialized.object, history) - - if not history_entries: - take_snapshot(serialized.object, user=serialized.object.owner) - - custom_attributes_values = data.get("custom_attributes_values", None) - if custom_attributes_values: - custom_attributes = serialized.object.project.taskcustomattributes.all().values('id', 'name') - custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values( - custom_attributes, custom_attributes_values) - store_custom_attributes_values(serialized.object, custom_attributes_values, - "task", serializers.TaskCustomAttributesValuesExportSerializer) - - return serialized - - add_errors("tasks", serialized.errors) - return None - +## MILESTONE def store_milestone(project, milestone): serialized = serializers.MilestoneExportSerializer(data=milestone, project=project) @@ -368,90 +288,17 @@ def store_milestone(project, milestone): return None -def store_attachment(project, obj, attachment): - serialized = serializers.AttachmentExportSerializer(data=attachment) - if serialized.is_valid(): - serialized.object.content_type = ContentType.objects.get_for_model(obj.__class__) - serialized.object.object_id = obj.id - serialized.object.project = project - if serialized.object.owner is None: - 
serialized.object.owner = serialized.object.project.owner - serialized.object._importing = True - serialized.object.size = serialized.object.attached_file.size - serialized.object.name = path.basename(serialized.object.attached_file.name) - serialized.save() - return serialized - add_errors("attachments", serialized.errors) - return serialized +def store_milestones(project, data): + results = [] + for milestone_data in data.get("milestones", []): + milestone = store_milestone(project, milestone_data) + results.append(milestone) + return results -def store_timeline_entry(project, timeline): - serialized = serializers.TimelineExportSerializer(data=timeline, context={"project": project}) - if serialized.is_valid(): - serialized.object.project = project - serialized.object.namespace = build_project_namespace(project) - serialized.object.object_id = project.id - serialized.object._importing = True - serialized.save() - return serialized - add_errors("timeline", serialized.errors) - return serialized +## USER STORIES - -def store_history(project, obj, history): - serialized = serializers.HistoryExportSerializer(data=history, context={"project": project}) - if serialized.is_valid(): - serialized.object.key = make_key_from_model_object(obj) - if serialized.object.diff is None: - serialized.object.diff = [] - serialized.object._importing = True - serialized.save() - return serialized - add_errors("history", serialized.errors) - return serialized - - -def store_wiki_page(project, wiki_page): - wiki_page["slug"] = slugify(unidecode(wiki_page.get("slug", ""))) - serialized = serializers.WikiPageExportSerializer(data=wiki_page) - if serialized.is_valid(): - serialized.object.project = project - if serialized.object.owner is None: - serialized.object.owner = serialized.object.project.owner - serialized.object._importing = True - serialized.object._not_notify = True - serialized.save() - serialized.save_watchers() - - for attachment in wiki_page.get("attachments", []): - 
store_attachment(project, serialized.object, attachment) - - history_entries = wiki_page.get("history", []) - for history in history_entries: - store_history(project, serialized.object, history) - - if not history_entries: - take_snapshot(serialized.object, user=serialized.object.owner) - - return serialized - - add_errors("wiki_pages", serialized.errors) - return None - - -def store_wiki_link(project, wiki_link): - serialized = serializers.WikiLinkExportSerializer(data=wiki_link) - if serialized.is_valid(): - serialized.object.project = project - serialized.object._importing = True - serialized.save() - return serialized - - add_errors("wiki_links", serialized.errors) - return None - - -def store_role_point(project, us, role_point): +def _store_role_point(project, us, role_point): serialized = serializers.RolePointsExportSerializer(data=role_point, context={"project": project}) if serialized.is_valid(): try: @@ -468,7 +315,6 @@ def store_role_point(project, us, role_point): add_errors("role_points", serialized.errors) return None - def store_user_story(project, data): if "status" not in data and project.default_us_status: data["status"] = project.default_us_status.name @@ -497,14 +343,14 @@ def store_user_story(project, data): serialized.object.save() for us_attachment in data.get("attachments", []): - store_attachment(project, serialized.object, us_attachment) + _store_attachment(project, serialized.object, us_attachment) for role_point in data.get("role_points", []): - store_role_point(project, serialized.object, role_point) + _store_role_point(project, serialized.object, role_point) history_entries = data.get("history", []) for history in history_entries: - store_history(project, serialized.object, history) + _store_history(project, serialized.object, history) if not history_entries: take_snapshot(serialized.object, user=serialized.object.owner) @@ -514,7 +360,7 @@ def store_user_story(project, data): custom_attributes = 
serialized.object.project.userstorycustomattributes.all().values('id', 'name') custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values( custom_attributes, custom_attributes_values) - store_custom_attributes_values(serialized.object, custom_attributes_values, + _store_custom_attributes_values(serialized.object, custom_attributes_values, "user_story", serializers.UserStoryCustomAttributesValuesExportSerializer) return serialized @@ -523,6 +369,74 @@ def store_user_story(project, data): return None +def store_user_stories(project, data): + results = [] + for userstory in data.get("user_stories", []): + us = store_user_story(project, userstory) + results.append(us) + return results + + +## TASKS + +def store_task(project, data): + if "status" not in data and project.default_task_status: + data["status"] = project.default_task_status.name + + serialized = serializers.TaskExportSerializer(data=data, context={"project": project}) + if serialized.is_valid(): + serialized.object.project = project + if serialized.object.owner is None: + serialized.object.owner = serialized.object.project.owner + serialized.object._importing = True + serialized.object._not_notify = True + + serialized.save() + serialized.save_watchers() + + if serialized.object.ref: + sequence_name = refs.make_sequence_name(project) + if not seq.exists(sequence_name): + seq.create(sequence_name) + seq.set_max(sequence_name, serialized.object.ref) + else: + serialized.object.ref, _ = refs.make_reference(serialized.object, project) + serialized.object.save() + + for task_attachment in data.get("attachments", []): + _store_attachment(project, serialized.object, task_attachment) + + history_entries = data.get("history", []) + for history in history_entries: + _store_history(project, serialized.object, history) + + if not history_entries: + take_snapshot(serialized.object, user=serialized.object.owner) + + custom_attributes_values = data.get("custom_attributes_values", None) + if 
custom_attributes_values: + custom_attributes = serialized.object.project.taskcustomattributes.all().values('id', 'name') + custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values( + custom_attributes, custom_attributes_values) + _store_custom_attributes_values(serialized.object, custom_attributes_values, + "task", serializers.TaskCustomAttributesValuesExportSerializer) + + return serialized + + add_errors("tasks", serialized.errors) + return None + + +def store_tasks(project, data): + results = [] + for task in data.get("tasks", []): + task = store_task(project, task) + results.append(task) + return results + + +## ISSUES + def store_issue(project, data): serialized = serializers.IssueExportSerializer(data=data, context={"project": project}) @@ -558,11 +472,11 @@ def store_issue(project, data): serialized.object.save() for attachment in data.get("attachments", []): - store_attachment(project, serialized.object, attachment) + _store_attachment(project, serialized.object, attachment) history_entries = data.get("history", []) for history in history_entries: - store_history(project, serialized.object, history) + _store_history(project, serialized.object, history) if not history_entries: take_snapshot(serialized.object, user=serialized.object.owner) @@ -572,10 +486,248 @@ def store_issue(project, data): custom_attributes = serialized.object.project.issuecustomattributes.all().values('id', 'name') custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values( custom_attributes, custom_attributes_values) - store_custom_attributes_values(serialized.object, custom_attributes_values, + _store_custom_attributes_values(serialized.object, custom_attributes_values, "issue", serializers.IssueCustomAttributesValuesExportSerializer) return serialized add_errors("issues", serialized.errors) return None + + +def store_issues(project, data): + issues = [] + for issue in data.get("issues", []): + issues.append(store_issue(project, 
issue)) + return issues + + +## WIKI PAGES + +def store_wiki_page(project, wiki_page): + wiki_page["slug"] = slugify(unidecode(wiki_page.get("slug", ""))) + serialized = serializers.WikiPageExportSerializer(data=wiki_page) + if serialized.is_valid(): + serialized.object.project = project + if serialized.object.owner is None: + serialized.object.owner = serialized.object.project.owner + serialized.object._importing = True + serialized.object._not_notify = True + serialized.save() + serialized.save_watchers() + + for attachment in wiki_page.get("attachments", []): + _store_attachment(project, serialized.object, attachment) + + history_entries = wiki_page.get("history", []) + for history in history_entries: + _store_history(project, serialized.object, history) + + if not history_entries: + take_snapshot(serialized.object, user=serialized.object.owner) + + return serialized + + add_errors("wiki_pages", serialized.errors) + return None + + +def store_wiki_pages(project, data): + results = [] + for wiki_page in data.get("wiki_pages", []): + results.append(store_wiki_page(project, wiki_page)) + return results + + +## WIKI LINKS + +def store_wiki_link(project, wiki_link): + serialized = serializers.WikiLinkExportSerializer(data=wiki_link) + if serialized.is_valid(): + serialized.object.project = project + serialized.object._importing = True + serialized.save() + return serialized + + add_errors("wiki_links", serialized.errors) + return None + + +def store_wiki_links(project, data): + results = [] + for wiki_link in data.get("wiki_links", []): + results.append(store_wiki_link(project, wiki_link)) + return results + + +## TAGS COLORS + +def store_tags_colors(project, data): + project.tags_colors = data.get("tags_colors", []) + project.save() + return None + + +## TIMELINE + +def _store_timeline_entry(project, timeline): + serialized = serializers.TimelineExportSerializer(data=timeline, context={"project": project}) + if serialized.is_valid(): + serialized.object.project = 
project + serialized.object.namespace = build_project_namespace(project) + serialized.object.object_id = project.id + serialized.object._importing = True + serialized.save() + return serialized + add_errors("timeline", serialized.errors) + return serialized + + +def store_timeline_entries(project, data): + results = [] + for timeline in data.get("timeline", []): + tl = _store_timeline_entry(project, timeline) + results.append(tl) + return results + + +############################################# +## Store project dict +############################################# + + +def _validate_if_owner_have_enought_space_to_this_project(owner, data): + # Validate if the owner can have this project + data["owner"] = owner.email + + is_private = data.get("is_private", False) + total_memberships = len([m for m in data.get("memberships", []) + if m.get("email", None) != data["owner"]]) + total_memberships = total_memberships + 1 # 1 is the owner + (enough_slots, error_message) = users_service.has_available_slot_for_import_new_project( + owner, + is_private, + total_memberships + ) + if not enough_slots: + raise err.TaigaImportError(error_message, None) + + +def _create_project_object(data): + # Create the project + project_serialized = store_project(data) + + if not project_serialized: + raise err.TaigaImportError(_("error importing project data"), None) + + return project_serialized.object if project_serialized else None + + +def _create_membership_for_project_owner(project): + if project.memberships.filter(user=project.owner).count() == 0: + if project.roles.all().count() > 0: + Membership.objects.create( + project=project, + email=project.owner.email, + user=project.owner, + role=project.roles.all().first(), + is_admin=True + ) + + +def _populate_project_object(project, data): + def check_if_there_is_some_error(message=_("error importing project data"), project=None): + errors = get_errors(clear=False) + if errors: + raise err.TaigaImportError(message, project, errors=errors) 
+ + # Create roles + store_roles(project, data) + check_if_there_is_some_error(_("error importing roles"), None) + + # Create memberships + store_memberships(project, data) + _create_membership_for_project_owner(project) + check_if_there_is_some_error(_("error importing memberships"), project) + + # Create project attributes values + store_project_attributes_values(project, data, "us_statuses", serializers.UserStoryStatusExportSerializer) + store_project_attributes_values(project, data, "points", serializers.PointsExportSerializer) + store_project_attributes_values(project, data, "task_statuses", serializers.TaskStatusExportSerializer) + store_project_attributes_values(project, data, "issue_types", serializers.IssueTypeExportSerializer) + store_project_attributes_values(project, data, "issue_statuses", serializers.IssueStatusExportSerializer) + store_project_attributes_values(project, data, "priorities", serializers.PriorityExportSerializer) + store_project_attributes_values(project, data, "severities", serializers.SeverityExportSerializer) + check_if_there_is_some_error(_("error importing lists of project attributes"), project) + + # Create default values for project attributes + store_default_project_attributes_values(project, data) + check_if_there_is_some_error(_("error importing default project attributes values"), project) + + # Create custom attributes + store_custom_attributes(project, data, "userstorycustomattributes", + serializers.UserStoryCustomAttributeExportSerializer) + store_custom_attributes(project, data, "taskcustomattributes", + serializers.TaskCustomAttributeExportSerializer) + store_custom_attributes(project, data, "issuecustomattributes", + serializers.IssueCustomAttributeExportSerializer) + check_if_there_is_some_error(_("error importing custom attributes"), project) + + + # Create milestones + store_milestones(project, data) + check_if_there_is_some_error(_("error importing sprints"), project) + + # Create user stories + 
store_user_stories(project, data) + check_if_there_is_some_error(_("error importing user stories"), project) + + # Createer tasks + store_tasks(project, data) + check_if_there_is_some_error(_("error importing tasks"), project) + + # Create issues + store_issues(project, data) + check_if_there_is_some_error(_("error importing issues"), project) + + # Create wiki pages + store_wiki_pages(project, data) + check_if_there_is_some_error(_("error importing wiki pages"), project) + + # Create wiki links + store_wiki_links(project, data) + check_if_there_is_some_error(_("error importing wiki links"), project) + + # Create tags + store_tags_colors(project, data) + check_if_there_is_some_error(_("error importing tags"), project) + + # Create timeline + store_timeline_entries(project, data) + check_if_there_is_some_error(_("error importing timelines"), project) + + # Regenerate stats + project.refresh_totals() + + +def store_project_from_dict(data, owner=None): + reset_errors() + + # Validate + if owner: + _validate_if_owner_have_enought_space_to_this_project(owner, data) + + # Create project + project = _create_project_object(data) + + # Populate project + try: + _populate_project_object(project, data) + except err.TaigaImportError: + # reraise known inport errors + raise + except: + # reise unknown errors as import error + raise err.TaigaImportError(_("unexpected error importing project"), project) + + return project diff --git a/taiga/export_import/tasks.py b/taiga/export_import/tasks.py index 79880ba4..4e5012d1 100644 --- a/taiga/export_import/tasks.py +++ b/taiga/export_import/tasks.py @@ -27,10 +27,11 @@ from django.conf import settings from django.utils.translation import ugettext as _ from taiga.base.mails import mail_builder +from taiga.base.utils import json from taiga.celery import app -from .service import render_project -from .dump_service import dict_to_project +from . import exceptions as err +from . 
import services from .renderers import ExportRenderer logger = logging.getLogger('taiga.export_import') @@ -46,7 +47,7 @@ def dump_project(self, user, project): try: url = default_storage.url(path) with default_storage.open(storage_path, mode="w") as outfile: - render_project(project, outfile) + services.render_project(project, outfile) except Exception: # Error @@ -77,28 +78,57 @@ def delete_project_dump(project_id, project_slug, task_id): default_storage.delete("exports/{}/{}-{}.json".format(project_id, project_slug, task_id)) +ADMIN_ERROR_LOAD_PROJECT_DUMP_MESSAGE = _(""" + +Error loading dump by {user_full_name} <{user_email}>: + + +REASON: +------- +{reason} + +DETAILS: +-------- +{details} + +TRACE ERROR: +------------""") + + @app.task def load_project_dump(user, dump): try: - project = dict_to_project(dump, user) - except Exception: - # Error + project = services.store_project_from_dict(dump, user) + except err.TaigaImportError as e: + # On Error + ## remove project + if e.project: + e.project.delete_related_content() + e.project.delete() + + ## send email to the user + error_subject = _("Error loading project dump") + error_message = e.message or _("Error loading your project dump file") + ctx = { "user": user, - "error_subject": _("Error loading project dump"), - "error_message": _("Error loading project dump"), + "error_subject": error_subject, + "error_message": error_message, } email = mail_builder.import_error(user, ctx) email.send() - logger.error('Error loading dump by %s <%s>', - user, - user.email, - exc_info=sys.exc_info()) - # TODO: [Rollback] Remove project because it can be corrupted + ## log the error for the sysadmins + text = ADMIN_ERROR_LOAD_PROJECT_DUMP_MESSAGE.format( + user_full_name=user, + user_email=user.email, + reason=e.message or _(" -- no detail info --"), + details=json.dumps(e.errors, indent=4) + ) + logger.error(text, exc_info=sys.exc_info()) else: - # On Success + # On Success ctx = {"user": user, "project": project} email = 
mail_builder.load_dump(user, ctx) email.send() diff --git a/tests/integration/test_importer_api.py b/tests/integration/test_importer_api.py index a881e67d..b9e2d1c7 100644 --- a/tests/integration/test_importer_api.py +++ b/tests/integration/test_importer_api.py @@ -23,7 +23,8 @@ from django.core.urlresolvers import reverse from django.core.files.base import ContentFile from taiga.base.utils import json -from taiga.export_import.dump_service import dict_to_project, TaigaImportError +from taiga.export_import import services +from taiga.export_import.exceptions import TaigaImportError from taiga.projects.models import Project, Membership from taiga.projects.issues.models import Issue from taiga.projects.userstories.models import UserStory @@ -36,6 +37,11 @@ from ..utils import DUMMY_BMP_DATA pytestmark = pytest.mark.django_db + +####################################################### +## test api/v1/importer +####################################################### + def test_invalid_project_import(client): user = f.UserFactory.create() client.login(user) @@ -43,7 +49,7 @@ def test_invalid_project_import(client): url = reverse("importer-list") data = {} - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 @@ -60,17 +66,16 @@ def test_valid_project_import_without_extra_data(client): "watchers": ["testing@taiga.io"] } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data must_empty_children = [ "issues", "user_stories", "us_statuses", "wiki_pages", "priorities", "severities", "milestones", "points", "issue_types", "task_statuses", "issue_statuses", "wiki_links", ] - assert all(map(lambda x: len(response_data[x]) == 0, must_empty_children)) - assert response_data["owner"] == user.email - assert 
response_data["watchers"] == [user.email, user_watching.email] + assert all(map(lambda x: len(response.data[x]) == 0, must_empty_children)) + assert response.data["owner"] == user.email + assert response.data["watchers"] == [user.email, user_watching.email] def test_valid_project_without_enough_public_projects_slots(client): @@ -170,11 +175,10 @@ def test_valid_project_import_with_not_existing_memberships(client): "roles": [{"name": "Role"}] } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data # The new membership and the owner membership - assert len(response_data["memberships"]) == 2 + assert len(response.data["memberships"]) == 2 def test_valid_project_import_with_membership_uuid_rewrite(client): @@ -193,9 +197,8 @@ def test_valid_project_import_with_membership_uuid_rewrite(client): "roles": [{"name": "Role"}] } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data assert Membership.objects.filter(email="with-uuid@email.com", token="123").count() == 0 @@ -234,9 +237,8 @@ def test_valid_project_import_with_extra_data(client): }], } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data must_empty_children = [ "issues", "user_stories", "wiki_pages", "milestones", "wiki_links", @@ -247,10 +249,10 @@ def test_valid_project_import_with_extra_data(client): "issue_types", "task_statuses", "issue_statuses", "memberships", ] - assert all(map(lambda x: len(response_data[x]) == 0, must_empty_children)) + assert all(map(lambda x: len(response.data[x]) == 0, must_empty_children)) # Allwais is created at least the owner membership - assert 
all(map(lambda x: len(response_data[x]) == 1, must_one_instance_children)) - assert response_data["owner"] == user.email + assert all(map(lambda x: len(response.data[x]) == 1, must_one_instance_children)) + assert response.data["owner"] == user.email def test_invalid_project_import_without_roles(client): @@ -263,10 +265,9 @@ def test_invalid_project_import_without_roles(client): "description": "Imported project", } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 2 + assert len(response.data) == 2 assert Project.objects.filter(slug="imported-project").count() == 0 def test_invalid_project_import_with_extra_data(client): @@ -290,10 +291,9 @@ def test_invalid_project_import_with_extra_data(client): "issue_statuses": [{}], } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 7 + assert len(response.data) == 7 assert Project.objects.filter(slug="imported-project").count() == 0 @@ -360,6 +360,327 @@ def test_invalid_project_import_with_custom_attributes(client): assert Project.objects.filter(slug="imported-project").count() == 0 +####################################################### +## tes api/v1/importer/milestone +####################################################### + +def test_invalid_milestone_import(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + client.login(user) + + url = reverse("importer-milestone", args=[project.pk]) + data = {} + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + + +def test_valid_milestone_import(client): + user = 
f.UserFactory.create() + user_watching = f.UserFactory.create(email="testing@taiga.io") + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + client.login(user) + + url = reverse("importer-milestone", args=[project.pk]) + data = { + "name": "Imported milestone", + "estimated_start": "2014-10-10", + "estimated_finish": "2014-10-20", + "watchers": ["testing@taiga.io"] + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 201 + assert response.data["watchers"] == [user_watching.email] + +def test_milestone_import_duplicated_milestone(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + client.login(user) + + url = reverse("importer-milestone", args=[project.pk]) + data = { + "name": "Imported milestone", + "estimated_start": "2014-10-10", + "estimated_finish": "2014-10-20", + } + # We create twice the same milestone + response = client.json.post(url, json.dumps(data)) + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + assert response.data["milestones"][0]["name"][0] == "Name duplicated for the project" + + + +####################################################### +## tes api/v1/importer/us +####################################################### + +def test_invalid_us_import(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + client.login(user) + + url = reverse("importer-us", args=[project.pk]) + data = {} + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + + +def test_valid_us_import_without_extra_data(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_us_status = 
f.UserStoryStatusFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-us", args=[project.pk]) + data = { + "subject": "Test" + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 201 + assert response.data["owner"] == user.email + assert response.data["ref"] is not None + + +def test_valid_us_import_with_extra_data(client): + user = f.UserFactory.create() + user_watching = f.UserFactory.create(email="testing@taiga.io") + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_us_status = f.UserStoryStatusFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-us", args=[project.pk]) + data = { + "subject": "Imported us", + "description": "Imported us", + "attachments": [{ + "owner": user.email, + "attached_file": { + "name": "imported attachment", + "data": base64.b64encode(b"TEST").decode("utf-8") + } + }], + "watchers": ["testing@taiga.io"] + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 201 + assert len(response.data["attachments"]) == 1 + assert response.data["owner"] == user.email + assert response.data["ref"] is not None + assert response.data["watchers"] == [user_watching.email] + + +def test_invalid_us_import_with_extra_data(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_us_status = f.UserStoryStatusFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-us", args=[project.pk]) + data = { + "subject": "Imported us", + "description": "Imported us", + "attachments": [{}], + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + assert len(response.data) == 1 + assert UserStory.objects.filter(subject="Imported 
us").count() == 0 + + +def test_invalid_us_import_with_bad_choices(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_us_status = f.UserStoryStatusFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-us", args=[project.pk]) + data = { + "subject": "Imported us", + "description": "Imported us", + "status": "Not valid" + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + assert len(response.data) == 1 + + +####################################################### +## tes api/v1/importer/task +####################################################### + +def test_invalid_task_import(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + client.login(user) + + url = reverse("importer-task", args=[project.pk]) + data = {} + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + + +def test_valid_task_import_without_extra_data(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_task_status = f.TaskStatusFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-task", args=[project.pk]) + data = { + "subject": "Test" + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 201 + assert response.data["owner"] == user.email + assert response.data["ref"] is not None + + +def test_valid_task_import_with_custom_attributes_values(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + membership = f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_task_status = 
f.TaskStatusFactory.create(project=project) + project.save() + custom_attr = f.TaskCustomAttributeFactory(project=project) + + url = reverse("importer-task", args=[project.pk]) + data = { + "subject": "Test Custom Attrs Values Tasks", + "custom_attributes_values": { + custom_attr.name: "test_value" + } + } + + client.login(user) + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 201 + custom_attributes_values = apps.get_model("custom_attributes.TaskCustomAttributesValues").objects.get( + task__subject=response.data["subject"]) + assert custom_attributes_values.attributes_values == {str(custom_attr.id): "test_value"} + + +def test_valid_task_import_with_extra_data(client): + user = f.UserFactory.create() + user_watching = f.UserFactory.create(email="testing@taiga.io") + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_task_status = f.TaskStatusFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-task", args=[project.pk]) + data = { + "subject": "Imported task", + "description": "Imported task", + "attachments": [{ + "owner": user.email, + "attached_file": { + "name": "imported attachment", + "data": base64.b64encode(b"TEST").decode("utf-8") + } + }], + "watchers": ["testing@taiga.io"] + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 201 + assert len(response.data["attachments"]) == 1 + assert response.data["owner"] == user.email + assert response.data["ref"] is not None + assert response.data["watchers"] == [user_watching.email] + + +def test_invalid_task_import_with_extra_data(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_task_status = f.TaskStatusFactory.create(project=project) + project.save() + client.login(user) + + url = 
reverse("importer-task", args=[project.pk]) + data = { + "subject": "Imported task", + "description": "Imported task", + "attachments": [{}], + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + assert len(response.data) == 1 + assert Task.objects.filter(subject="Imported task").count() == 0 + + +def test_invalid_task_import_with_bad_choices(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_task_status = f.TaskStatusFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-task", args=[project.pk]) + data = { + "subject": "Imported task", + "description": "Imported task", + "status": "Not valid" + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 400 + assert len(response.data) == 1 + + +def test_valid_task_with_user_story(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + f.MembershipFactory(project=project, user=user, is_admin=True) + project.default_task_status = f.TaskStatusFactory.create(project=project) + us = f.UserStoryFactory.create(project=project) + project.save() + client.login(user) + + url = reverse("importer-task", args=[project.pk]) + data = { + "subject": "Imported task", + "description": "Imported task", + "user_story": us.ref + } + + response = client.json.post(url, json.dumps(data)) + assert response.status_code == 201 + assert us.tasks.all().count() == 1 + + +####################################################### +## tes api/v1/importer/issue +####################################################### + def test_invalid_issue_import(client): user = f.UserFactory.create() project = f.ProjectFactory.create(owner=user) @@ -369,7 +690,7 @@ def test_invalid_issue_import(client): url = reverse("importer-issue", args=[project.pk]) data = {} - response = client.post(url, 
json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 @@ -387,11 +708,10 @@ def test_valid_user_story_import(client): "finish_date": "2014-10-24T00:00:00+0000" } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data - assert response_data["subject"] == "Imported issue" - assert response_data["finish_date"] == "2014-10-24T00:00:00+0000" + assert response.data["subject"] == "Imported issue" + assert response.data["finish_date"] == "2014-10-24T00:00:00+0000" def test_valid_user_story_import_with_custom_attributes_values(client): @@ -434,11 +754,10 @@ def test_valid_issue_import_without_extra_data(client): "subject": "Test" } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data - assert response_data["owner"] == user.email - assert response_data["ref"] is not None + assert response.data["owner"] == user.email + assert response.data["ref"] is not None def test_valid_issue_import_with_custom_attributes_values(client): @@ -495,14 +814,13 @@ def test_valid_issue_import_with_extra_data(client): "watchers": ["testing@taiga.io"] } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data - assert len(response_data["attachments"]) == 1 - assert response_data["owner"] == user.email - assert response_data["ref"] is not None - assert response_data["finished_date"] == "2014-10-24T00:00:00+0000" - assert response_data["watchers"] == [user_watching.email] + assert len(response.data["attachments"]) == 1 + assert response.data["owner"] == user.email + assert 
response.data["ref"] is not None + assert response.data["finished_date"] == "2014-10-24T00:00:00+0000" + assert response.data["watchers"] == [user_watching.email] def test_invalid_issue_import_with_extra_data(client): @@ -523,10 +841,9 @@ def test_invalid_issue_import_with_extra_data(client): "attachments": [{}], } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 + assert len(response.data) == 1 assert Issue.objects.filter(subject="Imported issue").count() == 0 @@ -548,10 +865,9 @@ def test_invalid_issue_import_with_bad_choices(client): "status": "Not valid" } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 + assert len(response.data) == 1 url = reverse("importer-issue", args=[project.pk]) data = { @@ -560,10 +876,9 @@ def test_invalid_issue_import_with_bad_choices(client): "priority": "Not valid" } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 + assert len(response.data) == 1 url = reverse("importer-issue", args=[project.pk]) data = { @@ -572,10 +887,9 @@ def test_invalid_issue_import_with_bad_choices(client): "severity": "Not valid" } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 + assert len(response.data) == 1 url = reverse("importer-issue", args=[project.pk]) data = { @@ -584,272 +898,14 @@ def 
test_invalid_issue_import_with_bad_choices(client): "type": "Not valid" } - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 - - -def test_invalid_us_import(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - client.login(user) - - url = reverse("importer-us", args=[project.pk]) - data = {} - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - - -def test_valid_us_import_without_extra_data(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_us_status = f.UserStoryStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-us", args=[project.pk]) - data = { - "subject": "Test" - } - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 201 - response_data = response.data - assert response_data["owner"] == user.email - assert response_data["ref"] is not None - - -def test_valid_us_import_with_extra_data(client): - user = f.UserFactory.create() - user_watching = f.UserFactory.create(email="testing@taiga.io") - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_us_status = f.UserStoryStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-us", args=[project.pk]) - data = { - "subject": "Imported us", - "description": "Imported us", - "attachments": [{ - "owner": user.email, - "attached_file": { - "name": "imported attachment", - "data": base64.b64encode(b"TEST").decode("utf-8") - } - }], - "watchers": ["testing@taiga.io"] - } - 
- response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 201 - response_data = response.data - assert len(response_data["attachments"]) == 1 - assert response_data["owner"] == user.email - assert response_data["ref"] is not None - assert response_data["watchers"] == [user_watching.email] - - -def test_invalid_us_import_with_extra_data(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_us_status = f.UserStoryStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-us", args=[project.pk]) - data = { - "subject": "Imported us", - "description": "Imported us", - "attachments": [{}], - } - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 - assert UserStory.objects.filter(subject="Imported us").count() == 0 - - -def test_invalid_us_import_with_bad_choices(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_us_status = f.UserStoryStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-us", args=[project.pk]) - data = { - "subject": "Imported us", - "description": "Imported us", - "status": "Not valid" - } - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 - - -def test_invalid_task_import(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - client.login(user) - - url = reverse("importer-task", args=[project.pk]) - data = 
{} - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - - -def test_valid_task_import_without_extra_data(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_task_status = f.TaskStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-task", args=[project.pk]) - data = { - "subject": "Test" - } - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 201 - response_data = response.data - assert response_data["owner"] == user.email - assert response_data["ref"] is not None - - -def test_valid_task_import_with_custom_attributes_values(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - membership = f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_task_status = f.TaskStatusFactory.create(project=project) - project.save() - custom_attr = f.TaskCustomAttributeFactory(project=project) - - url = reverse("importer-task", args=[project.pk]) - data = { - "subject": "Test Custom Attrs Values Tasks", - "custom_attributes_values": { - custom_attr.name: "test_value" - } - } - - client.login(user) response = client.json.post(url, json.dumps(data)) - assert response.status_code == 201 - custom_attributes_values = apps.get_model("custom_attributes.TaskCustomAttributesValues").objects.get( - task__subject=response.data["subject"]) - assert custom_attributes_values.attributes_values == {str(custom_attr.id): "test_value"} - - -def test_valid_task_import_with_extra_data(client): - user = f.UserFactory.create() - user_watching = f.UserFactory.create(email="testing@taiga.io") - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_task_status = 
f.TaskStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-task", args=[project.pk]) - data = { - "subject": "Imported task", - "description": "Imported task", - "attachments": [{ - "owner": user.email, - "attached_file": { - "name": "imported attachment", - "data": base64.b64encode(b"TEST").decode("utf-8") - } - }], - "watchers": ["testing@taiga.io"] - } - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 201 - response_data = response.data - assert len(response_data["attachments"]) == 1 - assert response_data["owner"] == user.email - assert response_data["ref"] is not None - assert response_data["watchers"] == [user_watching.email] - - -def test_invalid_task_import_with_extra_data(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_task_status = f.TaskStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-task", args=[project.pk]) - data = { - "subject": "Imported task", - "description": "Imported task", - "attachments": [{}], - } - - response = client.post(url, json.dumps(data), content_type="application/json") assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 - assert Task.objects.filter(subject="Imported task").count() == 0 + assert len(response.data) == 1 -def test_invalid_task_import_with_bad_choices(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_task_status = f.TaskStatusFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-task", args=[project.pk]) - data = { - "subject": "Imported task", - "description": "Imported task", - "status": "Not valid" - } - - response 
= client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 - - -def test_valid_task_with_user_story(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - project.default_task_status = f.TaskStatusFactory.create(project=project) - us = f.UserStoryFactory.create(project=project) - project.save() - client.login(user) - - url = reverse("importer-task", args=[project.pk]) - data = { - "subject": "Imported task", - "description": "Imported task", - "user_story": us.ref - } - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 201 - assert us.tasks.all().count() == 1 - +####################################################### +## tes api/v1/importer/wiki-page +####################################################### def test_invalid_wiki_page_import(client): user = f.UserFactory.create() @@ -860,7 +916,7 @@ def test_invalid_wiki_page_import(client): url = reverse("importer-wiki-page", args=[project.pk]) data = {} - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 @@ -875,10 +931,9 @@ def test_valid_wiki_page_import_without_extra_data(client): "slug": "imported-wiki-page", } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data - assert response_data["owner"] == user.email + assert response.data["owner"] == user.email def test_valid_wiki_page_import_with_extra_data(client): @@ -902,12 +957,11 @@ def test_valid_wiki_page_import_with_extra_data(client): "watchers": ["testing@taiga.io"] } - response = client.post(url, json.dumps(data), 
content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data - assert len(response_data["attachments"]) == 1 - assert response_data["owner"] == user.email - assert response_data["watchers"] == [user_watching.email] + assert len(response.data["attachments"]) == 1 + assert response.data["owner"] == user.email + assert response.data["watchers"] == [user_watching.email] def test_invalid_wiki_page_import_with_extra_data(client): @@ -923,13 +977,16 @@ def test_invalid_wiki_page_import_with_extra_data(client): "attachments": [{}], } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 - response_data = response.data - assert len(response_data) == 1 + assert len(response.data) == 1 assert WikiPage.objects.filter(slug="imported-wiki-page").count() == 0 +####################################################### +## tes api/v1/importer/wiki-link +####################################################### + def test_invalid_wiki_link_import(client): user = f.UserFactory.create() project = f.ProjectFactory.create(owner=user) @@ -939,7 +996,7 @@ def test_invalid_wiki_link_import(client): url = reverse("importer-wiki-link", args=[project.pk]) data = {} - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 400 @@ -955,65 +1012,16 @@ def test_valid_wiki_link_import(client): "href": "imported-wiki-link", } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 response.data +################################################################## +## tes taiga.export_import.services.store_project_from_dict 
+################################################################## -def test_invalid_milestone_import(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - client.login(user) - - url = reverse("importer-milestone", args=[project.pk]) - data = {} - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - - -def test_valid_milestone_import(client): - user = f.UserFactory.create() - user_watching = f.UserFactory.create(email="testing@taiga.io") - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - client.login(user) - - url = reverse("importer-milestone", args=[project.pk]) - data = { - "name": "Imported milestone", - "estimated_start": "2014-10-10", - "estimated_finish": "2014-10-20", - "watchers": ["testing@taiga.io"] - } - - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 201 - assert response.data["watchers"] == [user_watching.email] - -def test_milestone_import_duplicated_milestone(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - f.MembershipFactory(project=project, user=user, is_admin=True) - client.login(user) - - url = reverse("importer-milestone", args=[project.pk]) - data = { - "name": "Imported milestone", - "estimated_start": "2014-10-10", - "estimated_finish": "2014-10-20", - } - # We create twice the same milestone - response = client.post(url, json.dumps(data), content_type="application/json") - response = client.post(url, json.dumps(data), content_type="application/json") - assert response.status_code == 400 - response_data = response.data - assert response_data["milestones"][0]["name"][0] == "Name duplicated for the project" - - -def test_dict_to_project_with_no_projects_slots_available(client): +def 
test_services_store_project_from_dict_with_no_projects_slots_available(client): user = f.UserFactory.create(max_private_projects=0) data = { @@ -1024,12 +1032,12 @@ def test_dict_to_project_with_no_projects_slots_available(client): } with pytest.raises(TaigaImportError) as excinfo: - project = dict_to_project(data, owner=user) + project = services.store_project_from_dict(data, owner=user) assert "can't have more private projects" in str(excinfo.value) -def test_dict_to_project_with_no_members_private_project_slots_available(client): +def test_services_store_project_from_dict_with_no_members_private_project_slots_available(client): user = f.UserFactory.create(max_memberships_private_projects=2) data = { @@ -1059,12 +1067,12 @@ def test_dict_to_project_with_no_members_private_project_slots_available(client) } with pytest.raises(TaigaImportError) as excinfo: - project = dict_to_project(data, owner=user) + project = services.store_project_from_dict(data, owner=user) assert "reaches your current limit of memberships for private" in str(excinfo.value) -def test_dict_to_project_with_no_members_public_project_slots_available(client): +def test_services_store_project_from_dict_with_no_members_public_project_slots_available(client): user = f.UserFactory.create(max_memberships_public_projects=2) data = { @@ -1094,11 +1102,15 @@ def test_dict_to_project_with_no_members_public_project_slots_available(client): } with pytest.raises(TaigaImportError) as excinfo: - project = dict_to_project(data, owner=user) + project = services.store_project_from_dict(data, owner=user) assert "reaches your current limit of memberships for public" in str(excinfo.value) +################################################################## +## test api/v1/importer/load-dump +################################################################## + def test_invalid_dump_import(client): user = f.UserFactory.create() client.login(user) @@ -1110,132 +1122,11 @@ def test_invalid_dump_import(client): response = 
client.post(url, {'dump': data}) assert response.status_code == 400 - response_data = response.data - assert response_data["_error_message"] == "Invalid dump format" + assert response.data["_error_message"] == "Invalid dump format" -def test_valid_dump_import_with_logo(client, settings): +def test_valid_dump_import_without_enough_public_projects_slots(client, settings): settings.CELERY_ENABLED = False - - user = f.UserFactory.create() - client.login(user) - - url = reverse("importer-load-dump") - - data = ContentFile(bytes(json.dumps({ - "slug": "valid-project", - "name": "Valid project", - "description": "Valid project desc", - "is_private": False, - "logo": { - "name": "logo.bmp", - "data": base64.b64encode(DUMMY_BMP_DATA).decode("utf-8") - } - }), "utf-8")) - data.name = "test" - - response = client.post(url, {'dump': data}) - assert response.status_code == 201 - response_data = response.data - assert "id" in response_data - assert response_data["name"] == "Valid project" - assert "logo_small_url" in response_data - assert response_data["logo_small_url"] != None - assert "logo_big_url" in response_data - assert response_data["logo_big_url"] != None - - -def test_valid_dump_import_with_celery_disabled(client, settings): - settings.CELERY_ENABLED = False - - user = f.UserFactory.create() - client.login(user) - - url = reverse("importer-load-dump") - - data = ContentFile(bytes(json.dumps({ - "slug": "valid-project", - "name": "Valid project", - "description": "Valid project desc", - "is_private": True - }), "utf-8")) - data.name = "test" - - response = client.post(url, {'dump': data}) - assert response.status_code == 201 - response_data = response.data - assert "id" in response_data - assert response_data["name"] == "Valid project" - - -def test_valid_dump_import_with_celery_enabled(client, settings): - settings.CELERY_ENABLED = True - - user = f.UserFactory.create() - client.login(user) - - url = reverse("importer-load-dump") - - data = 
ContentFile(bytes(json.dumps({ - "slug": "valid-project", - "name": "Valid project", - "description": "Valid project desc", - "is_private": True - }), "utf-8")) - data.name = "test" - - response = client.post(url, {'dump': data}) - assert response.status_code == 202 - response_data = response.data - assert "import_id" in response_data - - -def test_dump_import_duplicated_project(client): - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - client.login(user) - - url = reverse("importer-load-dump") - - data = ContentFile(bytes(json.dumps({ - "slug": project.slug, - "name": "Test import", - "description": "Valid project desc", - "is_private": True - }), "utf-8")) - data.name = "test" - - response = client.post(url, {'dump': data}) - assert response.status_code == 201 - response_data = response.data - assert response_data["name"] == "Test import" - assert response_data["slug"] == "{}-test-import".format(user.username) - - -def test_dump_import_throttling(client, settings): - settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["import-dump-mode"] = "1/minute" - - user = f.UserFactory.create() - project = f.ProjectFactory.create(owner=user) - client.login(user) - - url = reverse("importer-load-dump") - - data = ContentFile(bytes(json.dumps({ - "slug": project.slug, - "name": "Test import", - "description": "Valid project desc", - "is_private": True - }), "utf-8")) - data.name = "test" - - response = client.post(url, {'dump': data}) - assert response.status_code == 201 - response = client.post(url, {'dump': data}) - assert response.status_code == 429 - - -def test_valid_dump_import_without_enough_public_projects_slots(client): user = f.UserFactory.create(max_public_projects=0) client.login(user) @@ -1257,7 +1148,8 @@ def test_valid_dump_import_without_enough_public_projects_slots(client): assert Project.objects.filter(slug="public-project-without-slots").count() == 0 -def test_valid_dump_import_without_enough_private_projects_slots(client): 
+def test_valid_dump_import_without_enough_private_projects_slots(client, settings): + settings.CELERY_ENABLED = False user = f.UserFactory.create(max_private_projects=0) client.login(user) @@ -1279,7 +1171,8 @@ def test_valid_dump_import_without_enough_private_projects_slots(client): assert Project.objects.filter(slug="private-project-without-slots").count() == 0 -def test_valid_dump_import_without_enough_membership_private_project_slots_one_project(client): +def test_valid_dump_import_without_enough_membership_private_project_slots_one_project(client, settings): + settings.CELERY_ENABLED = False user = f.UserFactory.create(max_memberships_private_projects=5) client.login(user) @@ -1326,7 +1219,8 @@ def test_valid_dump_import_without_enough_membership_private_project_slots_one_p assert Project.objects.filter(slug="project-without-memberships-slots").count() == 0 -def test_valid_dump_import_without_enough_membership_public_project_slots_one_project(client): +def test_valid_dump_import_without_enough_membership_public_project_slots_one_project(client, settings): + settings.CELERY_ENABLED = False user = f.UserFactory.create(max_memberships_public_projects=5) client.login(user) @@ -1424,9 +1318,8 @@ def test_valid_dump_import_with_enough_membership_private_project_slots_multiple response = client.post(url, {'dump': data}) assert response.status_code == 201 - response_data = response.data - assert "id" in response_data - assert response_data["name"] == "Valid project" + assert "id" in response.data + assert response.data["name"] == "Valid project" def test_valid_dump_import_with_enough_membership_public_project_slots_multiple_projects(client, settings): @@ -1480,30 +1373,13 @@ def test_valid_dump_import_with_enough_membership_public_project_slots_multiple_ response = client.post(url, {'dump': data}) assert response.status_code == 201 - response_data = response.data - assert "id" in response_data - assert response_data["name"] == "Valid project" + assert "id" in 
response.data + assert response.data["name"] == "Valid project" -def test_valid_dump_import_without_slug(client): - project = f.ProjectFactory.create(slug="existing-slug") - user = f.UserFactory.create() - client.login(user) - url = reverse("importer-load-dump") - - data = ContentFile(bytes(json.dumps({ - "name": "Project name", - "description": "Valid project desc", - "is_private": True - }), "utf-8")) - data.name = "test" - - response = client.post(url, {'dump': data}) - assert response.status_code == 201 - - -def test_valid_dump_import_with_the_limit_of_membership_whit_you_for_private_project(client): +def test_valid_dump_import_with_the_limit_of_membership_whit_you_for_private_project(client, settings): + settings.CELERY_ENABLED = False user = f.UserFactory.create(max_memberships_private_projects=5) client.login(user) @@ -1545,7 +1421,8 @@ def test_valid_dump_import_with_the_limit_of_membership_whit_you_for_private_pro assert Project.objects.filter(slug="private-project-with-memberships-limit-with-you").count() == 1 -def test_valid_dump_import_with_the_limit_of_membership_whit_you_for_public_project(client): +def test_valid_dump_import_with_the_limit_of_membership_whit_you_for_public_project(client, settings): + settings.CELERY_ENABLED = False user = f.UserFactory.create(max_memberships_public_projects=5) client.login(user) @@ -1587,6 +1464,203 @@ def test_valid_dump_import_with_the_limit_of_membership_whit_you_for_public_proj assert Project.objects.filter(slug="public-project-with-memberships-limit-with-you").count() == 1 +def test_valid_dump_import_with_celery_disabled(client, settings): + settings.CELERY_ENABLED = False + + user = f.UserFactory.create() + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "slug": "valid-project", + "name": "Valid project", + "description": "Valid project desc", + "is_private": True + }), "utf-8")) + data.name = "test" + + response = client.post(url, {'dump': data}) + assert 
response.status_code == 201 + assert "id" in response.data + assert response.data["name"] == "Valid project" + + +def test_invalid_dump_import_with_celery_disabled(client, settings): + settings.CELERY_ENABLED = False + user = f.UserFactory.create(max_memberships_public_projects=5) + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "slug": "invalid-project", + "name": "Invalid project", + "description": "Valid project desc", + "is_private": False, + "memberships": [ + { + "email": user.email, + "role": "Role", + }, + { + "email": "test2@test.com", + "role": "Role", + }, + { + "email": "test3@test.com", + "role": "Role", + }, + { + "email": "test4@test.com", + "role": "Role", + }, + { + "email": "test5@test.com", + "role": "Role", + }, + ], + }), "utf-8")) + data.name = "test" + + response = client.post(url, {'dump': data}) + assert response.status_code == 400 + + +def test_valid_dump_import_with_celery_enabled(client, settings): + settings.CELERY_ENABLED = True + + user = f.UserFactory.create() + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "slug": "valid-project", + "name": "Valid project", + "description": "Valid project desc", + "is_private": True + }), "utf-8")) + data.name = "test" + + response = client.post(url, {'dump': data}) + assert response.status_code == 202 + assert "import_id" in response.data + assert Project.objects.filter(slug="valid-project").count() == 1 + + +def test_invalid_dump_import_with_celery_enabled(client, settings): + settings.CELERY_ENABLED = True + user = f.UserFactory.create(max_memberships_public_projects=5) + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "slug": "invalid-project", + "name": "Invalid project", + "description": "Valid project desc", + "is_private": False, + "memberships": [ + { + "email": user.email, + "role": "Role", + }, + { + "email": 
"test2@test.com", + "role": "Role", + }, + { + "email": "test3@test.com", + "role": "Role", + }, + { + "email": "test4@test.com", + "role": "Role", + }, + { + "email": "test5@test.com", + "role": "Role", + }, + ], + }), "utf-8")) + data.name = "test" + + response = client.post(url, {'dump': data}) + assert response.status_code == 202 + assert "import_id" in response.data + assert Project.objects.filter(slug="invalid-project").count() == 0 + + +def test_dump_import_throttling(client, settings): + settings.REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"]["import-dump-mode"] = "1/minute" + + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "slug": project.slug, + "name": "Test import", + "description": "Valid project desc", + "is_private": True + }), "utf-8")) + data.name = "test" + + response = client.post(url, {'dump': data}) + assert response.status_code == 201 + response = client.post(url, {'dump': data}) + assert response.status_code == 429 + + +def test_valid_dump_import_without_slug(client): + project = f.ProjectFactory.create(slug="existing-slug") + user = f.UserFactory.create() + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "name": "Project name", + "description": "Valid project desc", + "is_private": True + }), "utf-8")) + data.name = "test" + + response = client.post(url, {'dump': data}) + assert response.status_code == 201 + + +def test_valid_dump_import_with_logo(client, settings): + user = f.UserFactory.create() + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "slug": "valid-project", + "name": "Valid project", + "description": "Valid project desc", + "is_private": False, + "logo": { + "name": "logo.bmp", + "data": base64.b64encode(DUMMY_BMP_DATA).decode("utf-8") + } + }), "utf-8")) + data.name = "test" + + response = 
client.post(url, {'dump': data}) + assert response.status_code == 201 + assert "id" in response.data + assert response.data["name"] == "Valid project" + assert "logo_small_url" in response.data + assert response.data["logo_small_url"] != None + assert "logo_big_url" in response.data + assert response.data["logo_big_url"] != None + + def test_valid_project_import_and_disabled_is_featured(client): user = f.UserFactory.create() client.login(user) @@ -1602,8 +1676,30 @@ def test_valid_project_import_and_disabled_is_featured(client): "is_featured": True } - response = client.post(url, json.dumps(data), content_type="application/json") + response = client.json.post(url, json.dumps(data)) assert response.status_code == 201 - response_data = response.data - assert response_data["owner"] == user.email - assert response_data["is_featured"] == False + assert response.data["owner"] == user.email + assert response.data["is_featured"] == False + + +def test_dump_import_duplicated_project(client): + user = f.UserFactory.create() + project = f.ProjectFactory.create(owner=user) + client.login(user) + + url = reverse("importer-load-dump") + + data = ContentFile(bytes(json.dumps({ + "slug": project.slug, + "name": "Test import", + "description": "Valid project desc", + "is_private": True + }), "utf-8")) + data.name = "test" + + response = client.post(url, {'dump': data}) + assert response.status_code == 201 + assert response.data["name"] == "Test import" + assert response.data["slug"] == "{}-test-import".format(user.username) + + diff --git a/tests/unit/test_export.py b/tests/unit/test_export.py index d80103a3..6a4a3ff0 100644 --- a/tests/unit/test_export.py +++ b/tests/unit/test_export.py @@ -20,7 +20,7 @@ import io from .. import factories as f from taiga.base.utils import json -from taiga.export_import.service import render_project +from taiga.export_import.services import render_project pytestmark = pytest.mark.django_db