Merge pull request #1097 from taigaio/3.3.0rc

3.3.0rc
Alex Hermida 2018-04-24 13:35:32 +02:00 committed by GitHub
commit b464da2b99
44 changed files with 839 additions and 94 deletions


@ -2,6 +2,25 @@
## Unreleased
## 3.3.0 Picea mariana (2018-04-26)
### Features
- Add "live notifications" to Taiga:
- Migration for user configuration.
- Add due date to US, tasks and issues (https://tree.taiga.io/project/taiga/issue/3070):
- Add to csv export.
- Add to projects import/export.
- Add to webhooks.
- Add to django admin.
- Add multiple assignment only in US (https://tree.taiga.io/project/taiga/issue/1961):
- The `assigned_to` field is still active.
- Add to csv export.
- Add to projects import/export.
- Add to webhooks.
- Add to django admin.
- Delete cards in Kanban and sprint Taskboard (https://tree.taiga.io/project/taiga/issue/2683).
## 3.2.3 (2018-04-04)
### Misc


@ -151,9 +151,14 @@ def update_attr_in_bulk_for_ids(values, attr, model):
# We can have deadlocks with multiple updates over the same object
# In that situation we just retry
import time
ts = time.time()
def trace_info(retries):
return '/* query=update_attr_in_bulk id={ts} retries={retries} */'.format(retries=retries, ts=ts)
def _run_sql(retries=0, max_retries=3):
try:
cursor.execute(trace_info(retries) + sql)
except DatabaseError:
if retries < max_retries:
_run_sql(retries + 1)
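For illustration, the prefix generated above looks like this (the timestamp is whatever time.time() returned when update_attr_in_bulk_for_ids started; the value shown is illustrative), which makes a retried statement easy to spot in pg_stat_activity or the PostgreSQL log:

trace_info(1)  # -> '/* query=update_attr_in_bulk id=1524571732.52 retries=1 */'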


@ -17,12 +17,12 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
def make_diff(first: dict, second: dict, not_found_value=None,
excluded_keys: tuple = ()) -> dict:
"""
Compute a diff between two dicts.
"""
diff = {}
# Check all keys in first dict
for key in first:
if key not in second:


@ -20,11 +20,15 @@
import collections
from django.db import connection
from django.utils.translation import ugettext_lazy as _
from taiga.base.utils import json
from taiga.base.utils.db import get_typename_for_model_instance
from . import middleware as mw
from . import backends
from taiga.front.templatetags.functions import resolve
from taiga.projects.history.choices import HistoryType
# The complete list of content types
# of allowed models for change events
@ -87,6 +91,85 @@ def emit_event_for_model(obj, *, type:str="change", channel:str="events",
sessionid=sessionid,
data=data)
def emit_live_notification_for_model(obj, user, history, *, type:str="change", channel:str="events",
sessionid:str="not-existing"):
"""
Sends a model live notification to users.
"""
if obj._importing:
return None
content_type = get_typename_for_model_instance(obj)
if content_type == "userstories.userstory":
if history.type == HistoryType.create:
title = _("User story created")
url = resolve("userstory", obj.project.slug, obj.ref)
elif history.type == HistoryType.change:
title = _("User story changed")
url = resolve("userstory", obj.project.slug, obj.ref)
else:
title = _("User story deleted")
url = None
body = _("US #{} - {}").format(obj.ref, obj.subject)
elif content_type == "tasks.task":
if history.type == HistoryType.create:
title = _("Task created")
url = resolve("task", obj.project.slug, obj.ref)
elif history.type == HistoryType.change:
title = _("Task changed")
url = resolve("task", obj.project.slug, obj.ref)
else:
title = _("Task deleted")
url = None
body = _("Task #{} - {}").format(obj.ref, obj.subject)
elif content_type == "issues.issue":
if history.type == HistoryType.create:
title = _("Issue created")
url = resolve("issue", obj.project.slug, obj.ref)
elif history.type == HistoryType.change:
title = _("Issue changed")
url = resolve("issue", obj.project.slug, obj.ref)
else:
title = _("Issue deleted")
url = None
body = _("Issue: #{} - {}").format(obj.ref, obj.subject)
elif content_type == "wiki.wiki_page":
if history.type == HistoryType.create:
title = _("Wiki Page created")
url = resolve("wiki", obj.project.slug, obj.slug)
elif history.type == HistoryType.change:
title = _("Wiki Page changed")
url = resolve("wiki", obj.project.slug, obj.slug)
else:
title = _("Wiki Page deleted")
url = None
body = _("Wiki Page: {}").format(obj.slug)
elif content_type == "milestones.milestone":
if history.type == HistoryType.create:
title = _("Sprint created")
url = resolve("taskboard", obj.project.slug, obj.slug)
elif history.type == HistoryType.change:
title = _("Sprint changed")
url = resolve("taskboard", obj.project.slug, obj.slug)
else:
title = _("Sprint deleted")
url = None
body = _("Sprint: {}").format(obj.name)
else:
return None
return emit_event(
{
"title": title,
"body": "Project: {}\n{}".format(obj.project.name, body),
"url": url,
"timeout": 10000,
"id": history.id
},
"live_notifications.{}".format(user.id),
sessionid=sessionid
)
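For orientation, when a user story changes this helper ends up emitting on the per-user channel "live_notifications.{user_id}" a payload shaped roughly like the following (values are illustrative; the URL is whatever resolve() builds for the configured front-end site):

{
    "title": "User story changed",
    "body": "Project: My project\nUS #123 - Fix login form",
    "url": "https://tree.taiga.io/project/my-project/us/123",
    "timeout": 10000,
    "id": 4567,  # id of the HistoryEntry that triggered it
}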
def emit_event_for_ids(ids, content_type:str, projectid:int, *,
type:str="change", channel:str="events", sessionid:str=None):


@ -215,6 +215,8 @@ class TaskExportSerializer(CustomAttributesValuesExportSerializerMixin,
blocked_note = Field()
is_blocked = Field()
tags = Field()
due_date = DateTimeField()
due_date_reason = Field()
def custom_attributes_queryset(self, project):
if project.id not in _custom_tasks_attributes_cache:
@ -235,6 +237,7 @@ class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin,
role_points = RolePointsExportSerializer(many=True)
owner = UserRelatedField()
assigned_to = UserRelatedField()
assigned_users = MethodField()
status = SlugRelatedField(slug_field="name")
milestone = SlugRelatedField(slug_field="name")
modified_date = DateTimeField()
@ -256,6 +259,8 @@ class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin,
blocked_note = Field()
is_blocked = Field()
tags = Field()
due_date = DateTimeField()
due_date_reason = Field()
def custom_attributes_queryset(self, project):
if project.id not in _custom_userstories_attributes_cache:
@ -269,6 +274,10 @@ class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin,
_userstories_statuses_cache[project.id] = {s.id: s.name for s in project.us_statuses.all()}
return _userstories_statuses_cache[project.id]
def get_assigned_users(self, obj):
return [user.email for user in obj.assigned_users.all()]
class EpicRelatedUserStoryExportSerializer(RelatedExportSerializer):
user_story = SlugRelatedField(slug_field="ref")
order = Field()
@ -339,6 +348,9 @@ class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin,
is_blocked = Field()
tags = Field()
due_date = DateTimeField()
due_date_reason = Field()
def get_votes(self, obj):
return [x.email for x in votes_service.get_voters(obj)]


@ -159,6 +159,7 @@ class HistorySnapshotField(JSONField):
return data
class HistoryUserField(JSONField):
def from_native(self, data):
if data is None:


@ -258,6 +258,7 @@ class TaskExportValidator(WatcheableObjectModelValidatorMixin):
milestone = ProjectRelatedField(slug_field="name", required=False)
assigned_to = UserRelatedField(required=False)
modified_date = serializers.DateTimeField(required=False)
due_date = serializers.DateTimeField(required=False)
class Meta:
model = tasks_models.Task
@ -301,10 +302,12 @@ class UserStoryExportValidator(WatcheableObjectModelValidatorMixin):
role_points = RolePointsExportValidator(many=True, required=False)
owner = UserRelatedField(required=False)
assigned_to = UserRelatedField(required=False)
assigned_users = UserRelatedField(many=True, required=False)
status = ProjectRelatedField(slug_field="name")
milestone = ProjectRelatedField(slug_field="name", required=False)
modified_date = serializers.DateTimeField(required=False)
generated_from_issue = ProjectRelatedField(slug_field="ref", required=False)
due_date = serializers.DateTimeField(required=False)
class Meta:
model = userstories_models.UserStory
@ -327,6 +330,7 @@ class IssueExportValidator(WatcheableObjectModelValidatorMixin):
type = ProjectRelatedField(slug_field="name")
milestone = ProjectRelatedField(slug_field="name", required=False)
modified_date = serializers.DateTimeField(required=False)
due_date = serializers.DateTimeField(required=False)
class Meta:
model = issues_models.Issue


@ -16,6 +16,7 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
from django_jinja import library
from django_sites import get_by_id as get_site_by_id
@ -31,3 +32,9 @@ def resolve(type, *args):
scheme = site.scheme and "{0}:".format(site.scheme) or ""
url = urls[type].format(*args)
return url_tmpl.format(scheme=scheme, domain=site.domain, url=url)
@library.filter(name="date")
def format_date(value, *args):
date_value = datetime.strptime(value, '%Y-%m-%d')
return date_value.strftime('%d %b %Y')
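A quick check of the filter registered above (the notification email template later in this diff pipes due date values through it as {{ values.0|date }}):

format_date("2018-04-26")  # -> "26 Apr 2018"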


@ -0,0 +1,28 @@
# Copyright (C) 2018 Miguel González <migonzalvar@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django.utils.translation import ugettext_lazy as _
class DueDateMixin(models.Model):
due_date = models.DateField(
blank=True, null=True, default=None, verbose_name=_('due date'),
)
due_date_reason = models.TextField(
null=False, blank=True, default='', verbose_name=_('reason for the due date'),
)
class Meta:
abstract = True
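The issue, task and user story models further down in this diff pick these two fields up simply by adding the mixin to their bases; abridged from the issue model below:

class Issue(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin,
            DueDateMixin, models.Model):
    ...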


@ -0,0 +1,40 @@
# Copyright (C) 2018 Miguel González <migonzalvar@gmail.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime as dt
from django.utils import timezone
from taiga.base.api import serializers
from taiga.base.fields import Field, MethodField
class DueDateSerializerMixin(serializers.LightSerializer):
due_date = Field()
due_date_reason = Field()
due_date_status = MethodField()
THRESHOLD = 14
def get_due_date_status(self, obj):
if obj.due_date is None:
return 'not_set'
elif obj.status and obj.status.is_closed:
return 'no_longer_applicable'
elif timezone.now().date() > obj.due_date:
return 'past_due'
elif (timezone.now().date() + dt.timedelta(
days=self.THRESHOLD)) >= obj.due_date:
return 'due_soon'
else:
return 'set'
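Summarising the branches above (THRESHOLD is 14 days, measured against today's date):

# due_date is None                      -> "not_set"
# status present and closed             -> "no_longer_applicable"
# due_date before today                 -> "past_due"
# today <= due_date <= today + 14 days  -> "due_soon"
# any later due_date                    -> "set"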


@ -92,11 +92,15 @@ def _common_users_values(diff):
users.update(diff["owner"])
if "assigned_to" in diff:
users.update(diff["assigned_to"])
if "assigned_users" in diff:
[users.update(usrs_ids) for usrs_ids in diff["assigned_users"] if
usrs_ids]
if users:
values["users"] = _get_users_values(users)
return values
def project_values(diff):
values = _common_users_values(diff)
return values
@ -332,6 +336,12 @@ def userstory_freezer(us) -> dict:
for rp in rpqsd:
points[str(rp.role_id)] = rp.points_id
assigned_users = [u.id for u in us.assigned_users.all()]
# Due to multiple assignment migration, for new snapshots we add to
# assigned users a list with the 'assigned to' value
if us.assigned_to_id and not assigned_users:
assigned_users = [us.assigned_to_id]
snapshot = {
"ref": us.ref,
"owner": us.owner_id,
@ -345,6 +355,7 @@
"description": us.description,
"description_html": mdrender(us.project, us.description),
"assigned_to": us.assigned_to_id,
"assigned_users": assigned_users,
"milestone": us.milestone_id,
"client_requirement": us.client_requirement,
"team_requirement": us.team_requirement,
@ -357,6 +368,7 @@ def userstory_freezer(us) -> dict:
"blocked_note_html": mdrender(us.project, us.blocked_note), "blocked_note_html": mdrender(us.project, us.blocked_note),
"custom_attributes": extract_user_story_custom_attributes(us), "custom_attributes": extract_user_story_custom_attributes(us),
"tribe_gig": us.tribe_gig, "tribe_gig": us.tribe_gig,
"due_date": str(us.due_date) if us.due_date else None
} }
return snapshot return snapshot
@ -381,6 +393,7 @@ def issue_freezer(issue) -> dict:
"blocked_note": issue.blocked_note,
"blocked_note_html": mdrender(issue.project, issue.blocked_note),
"custom_attributes": extract_issue_custom_attributes(issue),
"due_date": str(issue.due_date) if issue.due_date else None
}
return snapshot
@ -406,6 +419,7 @@ def task_freezer(task) -> dict:
"blocked_note": task.blocked_note,
"blocked_note_html": mdrender(task.project, task.blocked_note),
"custom_attributes": extract_task_custom_attributes(task),
"due_date": str(task.due_date) if task.due_date else None
}
return snapshot


@ -176,6 +176,15 @@ class HistoryEntry(models.Model):
(key, value) = resolve_diff_value(key)
elif key in users_keys:
value = [resolve_value("users", x) for x in self.diff[key]]
elif key == "assigned_users":
diff_in, diff_out = self.diff[key]
value_in = None
value_out = None
if diff_in:
value_in = ", ".join([resolve_value("users", x) for x in diff_in])
if diff_out:
value_out = ", ".join([resolve_value("users", x) for x in diff_out])
value = [value_in, value_out]
elif key == "points":
points = {}


@ -81,10 +81,15 @@ _values_impl_map = {}
# this fields are marked as hidden).
_not_important_fields = {
"epics.epic": frozenset(["epics_order", "user_stories"]),
"userstories.userstory": frozenset(
["backlog_order", "sprint_order", "kanban_order"]),
"tasks.task": frozenset(["us_order", "taskboard_order"]),
}
_deprecated_fields = {
"userstories.userstory": frozenset(["assigned_to"]),
}
log = logging.getLogger("taiga.history")
@ -191,7 +196,8 @@ def freeze_model_instance(obj: object) -> FrozenObj:
key = make_key_from_model_object(obj)
impl_fn = _freeze_impl_map[typename]
snapshot = impl_fn(obj)
assert isinstance(snapshot, dict), \
"freeze handlers should return always a dict"
return FrozenObj(key, snapshot)
@ -216,12 +222,46 @@ def is_hidden_snapshot(obj: FrozenDiff) -> bool:
return False
def get_excluded_fields(typename: str) -> tuple:
"""
Get excluded and deprecated fields to avoid in the diff
"""
return _deprecated_fields.get(typename, ())
def migrate_userstory_diff(obj: FrozenObj) -> FrozenObj:
# Due to multiple assignment migration, for old snapshots we add a list
# with the 'assigned to' value
if 'assigned_users' not in obj.snapshot.keys():
snapshot = deepcopy(obj.snapshot)
snapshot['assigned_users'] = [obj.snapshot['assigned_to']]
obj = FrozenObj(obj.key, snapshot)
return obj
_migrations = {"userstories.userstory": migrate_userstory_diff}
def migrate_to_last_version(typename: str, obj: FrozenObj) -> FrozenObj:
"""
Adapt old snapshots to the last format in order to generate correct diffs.
:param typename:
:param obj:
:return:
"""
return _migrations.get(typename, lambda x: x)(obj)
def make_diff(oldobj: FrozenObj, newobj: FrozenObj,
excluded_keys: tuple = ()) -> FrozenDiff:
"""
Compute a diff between two frozen objects.
"""
assert isinstance(newobj, FrozenObj), \
"newobj parameter should be instance of FrozenObj"
if oldobj is None:
return FrozenDiff(newobj.key, {}, newobj.snapshot)
@ -229,7 +269,7 @@ def make_diff(oldobj: FrozenObj, newobj: FrozenObj) -> FrozenDiff:
first = oldobj.snapshot
second = newobj.snapshot
diff = make_diff_from_dicts(first, second, None, excluded_keys)
return FrozenDiff(newobj.key, diff, newobj.snapshot)
@ -242,7 +282,8 @@ def make_diff_values(typename: str, fdiff: FrozenDiff) -> dict:
"""
if typename not in _values_impl_map:
log.warning(
"No implementation found of '{}' for values.".format(typename))
return {}
impl_fn = _values_impl_map[typename]
@ -294,10 +335,12 @@ def get_modified_fields(obj: object, last_modifications):
"""
key = make_key_from_model_object(obj)
entry_model = apps.get_model("history", "HistoryEntry")
history_entries = (
entry_model.objects.filter(key=key)
.order_by("-created_at")
.values_list("diff",
flat=True)[0:last_modifications]
)
modified_fields = []
for history_entry in history_entries:
@ -307,7 +350,8 @@ def get_modified_fields(obj: object, last_modifications):
@tx.atomic
def take_snapshot(obj: object, *, comment: str="", user=None,
delete: bool=False):
"""
Given any model instance with registred content type,
create new history entry of "change" type.
@ -323,6 +367,10 @@ def take_snapshot(obj: object, *, comment: str="", user=None, delete: bool=False
new_fobj = freeze_model_instance(obj)
old_fobj, need_real_snapshot = get_last_snapshot_for_key(key)
# migrate diff to latest schema
if old_fobj:
old_fobj = migrate_to_last_version(typename, old_fobj)
entry_model = apps.get_model("history", "HistoryEntry")
user_id = None if user is None else user.id
user_name = "" if user is None else user.get_full_name()
@ -338,11 +386,15 @@ def take_snapshot(obj: object, *, comment: str="", user=None, delete: bool=False
else:
raise RuntimeError("Unexpected condition")
excluded_fields = get_excluded_fields(typename)
fdiff = make_diff(old_fobj, new_fobj, excluded_fields)
# If diff and comment are empty, do
# not create empty history entry
if (not fdiff.diff and
not comment and old_fobj is not None and
entry_type != HistoryType.delete):
return None
fvals = make_diff_values(typename, fdiff)
@ -371,7 +423,8 @@ def take_snapshot(obj: object, *, comment: str="", user=None, delete: bool=False
# High level query api
def get_history_queryset_by_model_instance(obj: object,
types=(HistoryType.change,),
include_hidden=False):
"""
Get one page of history for specified object.
@ -391,16 +444,18 @@ def prefetch_owners_in_history_queryset(qs):
users = get_user_model().objects.filter(id__in=user_ids)
users_by_id = {u.id: u for u in users}
for history_entry in qs:
history_entry.prefetch_owner(users_by_id.get(history_entry.user["pk"],
None))
return qs
# Freeze & value register
register_freeze_implementation("projects.project", project_freezer)
register_freeze_implementation("milestones.milestone", milestone_freezer)
register_freeze_implementation("epics.epic", epic_freezer)
register_freeze_implementation("epics.relateduserstory",
epic_related_userstory_freezer)
register_freeze_implementation("userstories.userstory", userstory_freezer)
register_freeze_implementation("issues.issue", issue_freezer)
register_freeze_implementation("tasks.task", task_freezer)
@ -409,7 +464,8 @@ register_freeze_implementation("wiki.wikipage", wikipage_freezer)
register_values_implementation("projects.project", project_values)
register_values_implementation("milestones.milestone", milestone_values)
register_values_implementation("epics.epic", epic_values)
register_values_implementation("epics.relateduserstory",
epic_related_userstory_values)
register_values_implementation("userstories.userstory", userstory_values)
register_values_implementation("issues.issue", issue_values)
register_values_implementation("tasks.task", task_values)


@ -149,6 +149,60 @@
{% endif %}
</td>
</tr>
{# ASSIGNED TO #}
{% elif field_name == "due_date" %}
<tr>
<td valign="middle" rowspan="2" class="update-row-name">
<h3>{{ verbose_name(obj_class, field_name) }}</h3>
</td>
<td valign="top" class="update-row-from">
{% if values.0 != None and values.0 != "" %}
<span>{{ _("from") }}</span><br>
<strong>{{ values.0|date }}</strong>
{% else %}
<span>{{ _("from") }}</span><br>
<strong>{{ _("Not set") }}</strong>
{% endif %}
</td>
</tr>
<tr>
<td valign="top">
{% if values.1 != None and values.1 != "" %}
<span>{{ _("to") }}</span><br>
<strong>{{ values.1|date }}</strong>
{% else %}
<span>{{ _("to") }}</span><br>
<strong>{{ _("Not set") }}</strong>
{% endif %}
</td>
</tr>
{# ASSIGNED users #}
{% elif field_name == "assigned_users" %}
<tr>
<td valign="middle" rowspan="2" class="update-row-name">
<h3>{{ verbose_name(obj_class, field_name) }}</h3>
</td>
<td valign="top" class="update-row-from">
{% if values.0 != None and values.0 != "" %}
<span>{{ _("from") }}</span><br>
<strong>{{ values.0 }}</strong>
{% else %}
<span>{{ _("from") }}</span><br>
<strong>{{ _("Unassigned") }}</strong>
{% endif %}
</td>
</tr>
<tr>
<td valign="top">
{% if values.1 != None and values.1 != "" %}
<span>{{ _("to") }}</span><br>
<strong>{{ values.1 }}</strong>
{% else %}
<span>{{ _("to") }}</span><br>
<strong>{{ _("Unassigned") }}</strong>
{% endif %}
</td>
</tr>
{# * #}
{% else %}
<tr>


@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-04-09 09:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('issues', '0007_auto_20160614_1201'),
]
operations = [
migrations.AddField(
model_name='issue',
name='due_date',
field=models.DateField(blank=True, default=None, null=True, verbose_name='due date'),
),
migrations.AddField(
model_name='issue',
name='due_date_reason',
field=models.TextField(blank=True, default='', verbose_name='reason for the due date'),
),
]


@ -24,13 +24,14 @@ from django.utils import timezone
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
from taiga.projects.due_dates.models import DueDateMixin
from taiga.projects.occ import OCCModelMixin
from taiga.projects.notifications.mixins import WatchedModelMixin
from taiga.projects.mixins.blocked import BlockedMixin
from taiga.projects.tagging.models import TaggedMixin
class Issue(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, DueDateMixin, models.Model):
ref = models.BigIntegerField(db_index=True, null=True, blank=True, default=None,
verbose_name=_("ref"))
owner = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, default=None,


@ -21,6 +21,7 @@ from taiga.base.fields import Field, MethodField
from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
from taiga.projects.due_dates.serializers import DueDateSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
@ -33,7 +34,8 @@ from taiga.projects.votes.mixins.serializers import VoteResourceSerializerMixin
class IssueListSerializer(VoteResourceSerializerMixin, WatchedResourceSerializer,
OwnerExtraInfoSerializerMixin, AssignedToExtraInfoSerializerMixin,
StatusExtraInfoSerializerMixin, ProjectExtraInfoSerializerMixin,
DueDateSerializerMixin, TaggedInProjectResourceSerializer,
serializers.LightSerializer):
id = Field()
ref = Field()
severity = Field(attr="severity_id")


@ -82,7 +82,8 @@ def issues_to_csv(project, queryset):
"sprint_estimated_finish", "owner", "owner_full_name", "assigned_to",
"assigned_to_full_name", "status", "severity", "priority", "type",
"is_closed", "attachments", "external_reference", "tags", "watchers",
"voters", "created_date", "modified_date", "finished_date", "due_date",
"due_date_reason"]
custom_attrs = project.issuecustomattributes.all()
for custom_attr in custom_attrs:
@ -125,6 +126,8 @@
"created_date": issue.created_date,
"modified_date": issue.modified_date,
"finished_date": issue.finished_date,
"due_date": issue.due_date,
"due_date_reason": issue.due_date_reason,
}
for custom_attr in custom_attrs:


@ -32,5 +32,5 @@ class WatchedInline(GenericTabularInline):
class NotifyPolicyInline(TabularInline):
model = models.NotifyPolicy
extra = 0
readonly_fields = ("notify_level", "live_notify_level")
raw_id_fields = ["user"]


@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-31 13:03
from __future__ import unicode_literals
from django.db import migrations, models
import taiga.projects.notifications.choices
class Migration(migrations.Migration):
dependencies = [
('notifications', '0006_auto_20151103_0954'),
]
operations = [
migrations.AddField(
model_name='notifypolicy',
name='live_notify_level',
field=models.SmallIntegerField(choices=[(taiga.projects.notifications.choices.NotifyLevel(1), 'Involved'), (taiga.projects.notifications.choices.NotifyLevel(2), 'All'), (taiga.projects.notifications.choices.NotifyLevel(3), 'None')], default=taiga.projects.notifications.choices.NotifyLevel(1)),
),
]


@ -36,6 +36,7 @@ class NotifyPolicy(models.Model):
project = models.ForeignKey("projects.Project", related_name="notify_policies")
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="notify_policies")
notify_level = models.SmallIntegerField(choices=NOTIFY_LEVEL_CHOICES)
live_notify_level = models.SmallIntegerField(choices=NOTIFY_LEVEL_CHOICES, default=NotifyLevel.involved)
created_at = models.DateTimeField(default=timezone.now)
modified_at = models.DateTimeField()


@ -27,7 +27,7 @@ class NotifyPolicySerializer(serializers.ModelSerializer):
class Meta:
model = models.NotifyPolicy
fields = ('id', 'project', 'project_name', 'notify_level', "live_notify_level")
def get_project_name(self, obj):
return obj.project.name


@ -38,6 +38,7 @@ from taiga.projects.history.services import (make_key_from_model_object,
get_last_snapshot_for_key,
get_model_from_key)
from taiga.permissions.services import user_has_perm
from taiga.events import events
from .models import HistoryChangeNotification, Watched
from .squashing import squash_history_entries
@ -54,7 +55,8 @@ def notify_policy_exists(project, user) -> bool:
return qs.exists()
def create_notify_policy(project, user, level=NotifyLevel.involved,
live_level=NotifyLevel.involved):
"""
Given a project and user, create notification policy for it.
"""
@ -62,23 +64,30 @@ def create_notify_policy(project, user, level=NotifyLevel.involved):
try:
return model_cls.objects.create(project=project,
user=user,
notify_level=level,
live_notify_level=live_level)
except IntegrityError as e:
raise exc.IntegrityError(
_("Notify exists for specified user and project")) from e
def create_notify_policy_if_not_exists(project, user,
level=NotifyLevel.involved,
live_level=NotifyLevel.involved):
"""
Given a project and user, create notification policy for it.
"""
model_cls = apps.get_model("notifications", "NotifyPolicy")
try:
result = model_cls.objects.get_or_create(
project=project,
user=user,
defaults={"notify_level": level, "live_notify_level": live_level}
)
return result[0]
except IntegrityError as e:
raise exc.IntegrityError(
_("Notify exists for specified user and project")) from e
def analize_object_for_watchers(obj: object, comment: str, user: object):
@ -133,7 +142,7 @@ def _filter_notificable(user):
return user.is_active and not user.is_system
def get_users_to_notify(obj, *, history=None, discard_users=None, live=False) -> list:
"""
Get filtered set of users to notify for specified
model instance and changer.
@ -145,6 +154,8 @@ def get_users_to_notify(obj, *, history=None, discard_users=None) -> list:
def _check_level(project: object, user: object, levels: tuple) -> bool:
policy = project.cached_notify_policy_for_user(user)
if live:
return policy.live_notify_level in levels
return policy.notify_level in levels
_can_notify_hard = partial(_check_level, project,
@ -221,7 +232,6 @@ def send_notifications(obj, *, history):
owner=owner,
project=obj.project,
history_type=history.type))
notification.updated_datetime = timezone.now()
notification.save()
notification.history_entries.add(history)
@ -235,6 +245,10 @@ def send_notifications(obj, *, history):
if settings.CHANGE_NOTIFICATIONS_MIN_INTERVAL == 0:
send_sync_notifications(notification.id)
live_notify_users = get_users_to_notify(obj, history=history, discard_users=[notification.owner], live=True)
for user in live_notify_users:
events.emit_live_notification_for_model(obj, user, history)
@transaction.atomic
def send_sync_notifications(notification_id):
@ -305,6 +319,7 @@ def send_sync_notifications(notification_id):
context["lang"] = user.lang or settings.LANGUAGE_CODE
email.send(user.email, context, headers=headers)
notification.delete()
@ -416,7 +431,11 @@ def add_watcher(obj, user):
project=obj.project)
notify_policy, _ = apps.get_model("notifications", "NotifyPolicy").objects.get_or_create(
project=obj.project,
user=user,
defaults={"notify_level": NotifyLevel.involved,
"live_notify_level": NotifyLevel.involved}
)
return watched
@ -438,22 +457,25 @@ def remove_watcher(obj, user):
qs.delete()
def set_notify_policy_level(notify_policy, notify_level, live=False):
"""
Set notification level for specified policy.
"""
if notify_level not in [e.value for e in NotifyLevel]:
raise exc.IntegrityError(_("Invalid value for notify level"))
if live:
notify_policy.live_notify_level = notify_level
else:
notify_policy.notify_level = notify_level
notify_policy.save()
def set_notify_policy_level_to_ignore(notify_policy, live=False):
"""
Set notification level for specified policy.
"""
set_notify_policy_level(notify_policy, NotifyLevel.none, live=live)
def make_ms_thread_index(msg_id, dt):


@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-04-09 09:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0011_auto_20160928_0755'),
]
operations = [
migrations.AddField(
model_name='task',
name='due_date',
field=models.DateField(blank=True, default=None, null=True, verbose_name='due date'),
),
migrations.AddField(
model_name='task',
name='due_date_reason',
field=models.TextField(blank=True, default='', verbose_name='reason for the due date'),
),
]


@ -24,13 +24,14 @@ from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from taiga.base.utils.time import timestamp_ms
from taiga.projects.due_dates.models import DueDateMixin
from taiga.projects.occ import OCCModelMixin
from taiga.projects.notifications.mixins import WatchedModelMixin
from taiga.projects.mixins.blocked import BlockedMixin
from taiga.projects.tagging.models import TaggedMixin
class Task(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, DueDateMixin, models.Model):
user_story = models.ForeignKey("userstories.UserStory", null=True, blank=True,
related_name="tasks", verbose_name=_("user story"))
ref = models.BigIntegerField(db_index=True, null=True, blank=True, default=None,


@ -22,6 +22,7 @@ from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
from taiga.projects.attachments.serializers import BasicAttachmentsInfoSerializerMixin
from taiga.projects.due_dates.serializers import DueDateSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
@ -36,7 +37,8 @@ class TaskListSerializer(VoteResourceSerializerMixin, WatchedResourceSerializer,
OwnerExtraInfoSerializerMixin, AssignedToExtraInfoSerializerMixin,
StatusExtraInfoSerializerMixin, ProjectExtraInfoSerializerMixin,
BasicAttachmentsInfoSerializerMixin, TaggedInProjectResourceSerializer,
TotalCommentsSerializerMixin, DueDateSerializerMixin,
serializers.LightSerializer):
id = Field()
user_story = Field(attr="user_story_id")


@ -121,8 +121,9 @@ def tasks_to_csv(project, queryset):
fieldnames = ["ref", "subject", "description", "user_story", "sprint", "sprint_estimated_start",
"sprint_estimated_finish", "owner", "owner_full_name", "assigned_to",
"assigned_to_full_name", "status", "is_iocaine", "is_closed", "us_order",
"taskboard_order", "attachments", "external_reference", "tags", "watchers",
"voters", "created_date", "modified_date", "finished_date", "due_date",
"due_date_reason"]
custom_attrs = project.taskcustomattributes.all()
for custom_attr in custom_attrs:
@ -167,6 +168,8 @@
"created_date": task.created_date,
"modified_date": task.modified_date,
"finished_date": task.finished_date,
"due_date": task.due_date,
"due_date_reason": task.due_date_reason,
}
for custom_attr in custom_attrs:
value = task.custom_attributes_values.attributes_values.get(str(custom_attr.id), None)


@ -67,6 +67,10 @@ class UserStoryAdmin(admin.ModelAdmin):
and getattr(self, 'obj', None)):
kwargs["queryset"] = db_field.related.parent_model.objects.filter(
memberships__project=self.obj.project)
elif (db_field.name in ["assigned_users"]
and getattr(self, 'obj', None)):
kwargs["queryset"] = db_field.related_model.objects.filter(
memberships__project=self.obj.project)
return super().formfield_for_manytomany(db_field, request, **kwargs)


@ -127,7 +127,6 @@ class UserStoryViewSet(OCCResourceMixin, VotedResourceMixin, HistoryResourceMixi
include_attachments=include_attachments,
include_tasks=include_tasks,
epic_id=epic_id)
return qs
def pre_conditions_on_save(self, obj):


@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-04-09 09:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('userstories', '0014_auto_20160928_0540'),
]
operations = [
migrations.AddField(
model_name='userstory',
name='due_date',
field=models.DateField(blank=True, default=None, null=True, verbose_name='due date'),
),
migrations.AddField(
model_name='userstory',
name='due_date_reason',
field=models.TextField(blank=True, default='', verbose_name='reason for the due date'),
),
]


@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-02-13 10:14
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('userstories', '0015_add_due_date'),
]
operations = [
migrations.AddField(
model_name='userstory',
name='assigned_users',
field=models.ManyToManyField(blank=True, default=None, related_name='assigned_userstories', to=settings.AUTH_USER_MODEL, verbose_name='assigned users'),
),
]


@ -26,6 +26,7 @@ from django.utils import timezone
from picklefield.fields import PickledObjectField
from taiga.base.utils.time import timestamp_ms
from taiga.projects.due_dates.models import DueDateMixin
from taiga.projects.tagging.models import TaggedMixin
from taiga.projects.occ import OCCModelMixin
from taiga.projects.notifications.mixins import WatchedModelMixin
@ -57,7 +58,7 @@ class RolePoints(models.Model):
return self.user_story.project
class UserStory(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, DueDateMixin, models.Model):
ref = models.BigIntegerField(db_index=True, null=True, blank=True, default=None,
verbose_name=_("ref"))
milestone = models.ForeignKey("milestones.Milestone", null=True, blank=True,
@ -96,6 +97,9 @@ class UserStory(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, mod
assigned_to = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True,
default=None, related_name="userstories_assigned_to_me",
verbose_name=_("assigned to"))
assigned_users = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True,
default=None, related_name="assigned_userstories",
verbose_name=_("assigned users"))
client_requirement = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("is client requirement"))
team_requirement = models.BooleanField(default=False, null=False, blank=True,


@ -22,6 +22,7 @@ from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
from taiga.projects.attachments.serializers import BasicAttachmentsInfoSerializerMixin
from taiga.projects.due_dates.serializers import DueDateSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
@ -49,7 +50,7 @@ class UserStoryListSerializer(ProjectExtraInfoSerializerMixin,
OwnerExtraInfoSerializerMixin, AssignedToExtraInfoSerializerMixin,
StatusExtraInfoSerializerMixin, BasicAttachmentsInfoSerializerMixin,
TaggedInProjectResourceSerializer, TotalCommentsSerializerMixin,
DueDateSerializerMixin, serializers.LightSerializer):
id = Field()
ref = Field()
@ -82,6 +83,24 @@ class UserStoryListSerializer(ProjectExtraInfoSerializerMixin,
epic_order = MethodField()
tasks = MethodField()
assigned_users = MethodField()
def get_assigned_users(self, obj):
"""Get the assigned users of an object.
:return: User queryset object representing the assigned users
"""
if not obj.assigned_to:
return set([user.id for user in obj.assigned_users.all()])
assigned_users = [user.id for user in obj.assigned_users.all()] + \
[obj.assigned_to.id]
if not assigned_users:
return None
return set(assigned_users)
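Concretely, the method above resolves like this (user ids are illustrative):

# assigned_to = user 7, assigned_users = [7, 9]  ->  {7, 9}
# assigned_to = None,   assigned_users = []      ->  set()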
def get_epic_order(self, obj):
include_epic_order = getattr(obj, "include_epic_order", False)


@ -38,6 +38,7 @@ from taiga.projects.notifications.utils import attach_watchers_to_queryset
from . import models
#####################################################
# Bulk actions
#####################################################
@ -46,7 +47,8 @@ def get_userstories_from_bulk(bulk_data, **additional_fields):
"""Convert `bulk_data` into a list of user stories.
:param bulk_data: List of user stories in bulk format.
:param additional_fields: Additional fields when instantiating each user
story.
:return: List of `UserStory` instances.
"""
@ -54,12 +56,14 @@ def get_userstories_from_bulk(bulk_data, **additional_fields):
for line in text.split_in_lines(bulk_data)]
def create_userstories_in_bulk(bulk_data, callback=None, precall=None,
**additional_fields):
"""Create user stories from `bulk_data`.
:param bulk_data: List of user stories in bulk format.
:param callback: Callback to execute after each user story save.
:param additional_fields: Additional fields when instantiating each user
story.
:return: List of created `Task` instances.
"""
@ -76,11 +80,13 @@ def create_userstories_in_bulk(bulk_data, callback=None, precall=None, **additio
return userstories
def update_userstories_order_in_bulk(bulk_data: list, field: str,
project: object,
status: object = None,
milestone: object = None):
"""
Updates the order of the userstories specified adding the extra updates
needed to keep consistency.
`bulk_data` should be a list of dicts with the following format:
`field` is the order field used
@ -106,8 +112,8 @@ def update_userstories_order_in_bulk(bulk_data: list, field: str, project: objec
def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object): def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
""" """
Update the milestone and the milestone order of some user stories adding the Update the milestone and the milestone order of some user stories adding
extra orders needed to keep consistency. the extra orders needed to keep consistency.
`bulk_data` should be a list of dicts with the following format: `bulk_data` should be a list of dicts with the following format:
[{'us_id': <value>, 'order': <value>}, ...] [{'us_id': <value>, 'order': <value>}, ...]
""" """
@ -116,7 +122,8 @@ def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
new_us_orders = {} new_us_orders = {}
for e in bulk_data: for e in bulk_data:
new_us_orders[e["us_id"]] = e["order"] new_us_orders[e["us_id"]] = e["order"]
# The base orders where we apply the new orders must contain all the values # The base orders where we apply the new orders must contain all
# the values
us_orders[e["us_id"]] = e["order"] us_orders[e["us_id"]] = e["order"]
apply_order_updates(us_orders, new_us_orders) apply_order_updates(us_orders, new_us_orders)
@ -128,11 +135,14 @@ def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
content_type="userstories.userstory", content_type="userstories.userstory",
projectid=milestone.project.pk) projectid=milestone.project.pk)
db.update_attr_in_bulk_for_ids(us_milestones, "milestone_id", model=models.UserStory) db.update_attr_in_bulk_for_ids(us_milestones, "milestone_id",
model=models.UserStory)
db.update_attr_in_bulk_for_ids(us_orders, "sprint_order", models.UserStory) db.update_attr_in_bulk_for_ids(us_orders, "sprint_order", models.UserStory)
# Updating the milestone for the tasks # Updating the milestone for the tasks
Task.objects.filter(user_story_id__in=[e["us_id"] for e in bulk_data]).update(milestone=milestone) Task.objects.filter(
user_story_id__in=[e["us_id"] for e in bulk_data]).update(
milestone=milestone)
return us_orders return us_orders
@ -157,7 +167,8 @@ def calculate_userstory_is_closed(user_story):
if user_story.tasks.count() == 0: if user_story.tasks.count() == 0:
return user_story.status is not None and user_story.status.is_closed return user_story.status is not None and user_story.status.is_closed
if all([task.status is not None and task.status.is_closed for task in user_story.tasks.all()]): if all([task.status is not None and task.status.is_closed for task in
user_story.tasks.all()]):
return True return True
return False return False
@ -183,9 +194,12 @@ def open_userstory(us):
def userstories_to_csv(project, queryset): def userstories_to_csv(project, queryset):
csv_data = io.StringIO() csv_data = io.StringIO()
fieldnames = ["ref", "subject", "description", "sprint", "sprint_estimated_start", fieldnames = ["ref", "subject", "description", "sprint",
"sprint_estimated_finish", "owner", "owner_full_name", "assigned_to", "sprint_estimated_start",
"assigned_to_full_name", "status", "is_closed"] "sprint_estimated_finish", "owner", "owner_full_name",
"assigned_to",
"assigned_to_full_name", "assigned_users",
"assigned_users_full_name", "status", "is_closed"]
roles = project.roles.filter(computable=True).order_by('slug') roles = project.roles.filter(computable=True).order_by('slug')
for role in roles: for role in roles:
@ -197,7 +211,7 @@ def userstories_to_csv(project, queryset):
"created_date", "modified_date", "finish_date", "created_date", "modified_date", "finish_date",
"client_requirement", "team_requirement", "attachments", "client_requirement", "team_requirement", "attachments",
"generated_from_issue", "external_reference", "tasks", "generated_from_issue", "external_reference", "tasks",
"tags", "watchers", "voters"] "tags", "watchers", "voters", "due_date", "due_date_reason"]
custom_attrs = project.userstorycustomattributes.all() custom_attrs = project.userstorycustomattributes.all()
for custom_attr in custom_attrs: for custom_attr in custom_attrs:
@ -227,12 +241,21 @@ def userstories_to_csv(project, queryset):
"subject": us.subject, "subject": us.subject,
"description": us.description, "description": us.description,
"sprint": us.milestone.name if us.milestone else None, "sprint": us.milestone.name if us.milestone else None,
"sprint_estimated_start": us.milestone.estimated_start if us.milestone else None, "sprint_estimated_start": us.milestone.estimated_start if
"sprint_estimated_finish": us.milestone.estimated_finish if us.milestone else None, us.milestone else None,
"sprint_estimated_finish": us.milestone.estimated_finish if
us.milestone else None,
"owner": us.owner.username if us.owner else None, "owner": us.owner.username if us.owner else None,
"owner_full_name": us.owner.get_full_name() if us.owner else None, "owner_full_name": us.owner.get_full_name() if us.owner else None,
"assigned_to": us.assigned_to.username if us.assigned_to else None, "assigned_to": us.assigned_to.username if us.assigned_to else None,
"assigned_to_full_name": us.assigned_to.get_full_name() if us.assigned_to else None, "assigned_to_full_name": us.assigned_to.get_full_name() if
us.assigned_to else None,
"assigned_users": ",".join(
[assigned_user.username for assigned_user in
us.assigned_users.all()]),
"assigned_users_full_name": ",".join(
[assigned_user.get_full_name() for assigned_user in
us.assigned_users.all()]),
"status": us.status.name if us.status else None, "status": us.status.name if us.status else None,
"is_closed": us.is_closed, "is_closed": us.is_closed,
"backlog_order": us.backlog_order, "backlog_order": us.backlog_order,
@ -244,22 +267,28 @@ def userstories_to_csv(project, queryset):
"client_requirement": us.client_requirement, "client_requirement": us.client_requirement,
"team_requirement": us.team_requirement, "team_requirement": us.team_requirement,
"attachments": us.attachments.count(), "attachments": us.attachments.count(),
"generated_from_issue": us.generated_from_issue.ref if us.generated_from_issue else None, "generated_from_issue": us.generated_from_issue.ref if
us.generated_from_issue else None,
"external_reference": us.external_reference, "external_reference": us.external_reference,
"tasks": ",".join([str(task.ref) for task in us.tasks.all()]), "tasks": ",".join([str(task.ref) for task in us.tasks.all()]),
"tags": ",".join(us.tags or []), "tags": ",".join(us.tags or []),
"watchers": us.watchers, "watchers": us.watchers,
"voters": us.total_voters "voters": us.total_voters,
"due_date": us.due_date,
"due_date_reason": us.due_date_reason,
} }
us_role_points_by_role_id = {us_rp.role.id: us_rp.points.value for us_rp in us.role_points.all()} us_role_points_by_role_id = {us_rp.role.id: us_rp.points.value for
us_rp in us.role_points.all()}
for role in roles: for role in roles:
row["{}-points".format(role.slug)] = us_role_points_by_role_id.get(role.id, 0) row["{}-points".format(role.slug)] = \
us_role_points_by_role_id.get(role.id, 0)
row['total-points'] = us.get_total_points() row['total-points'] = us.get_total_points()
for custom_attr in custom_attrs: for custom_attr in custom_attrs:
value = us.custom_attributes_values.attributes_values.get(str(custom_attr.id), None) value = us.custom_attributes_values.attributes_values.get(
str(custom_attr.id), None)
row[custom_attr.name] = value row[custom_attr.name] = value
writer.writerow(row) writer.writerow(row)

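Illustrative only (values invented): a tiny sketch of how the two new assigned-users CSV cells are built, mirroring the ",".join(...) calls above:

    usernames = ["alice", "bob"]              # hypothetical assignees
    full_names = ["Alice Doe", "Bob Roe"]
    row_fragment = {
        "assigned_users": ",".join(usernames),             # -> "alice,bob"
        "assigned_users_full_name": ",".join(full_names),  # -> "Alice Doe,Bob Roe"
    }
    assert row_fragment["assigned_users"] == "alice,bob"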
View File

@ -177,3 +177,22 @@ def attach_extra_info(queryset, user=None, include_attachments=False, include_ta
queryset = attach_is_watcher_to_queryset(queryset, user) queryset = attach_is_watcher_to_queryset(queryset, user)
queryset = attach_total_comments_to_queryset(queryset) queryset = attach_total_comments_to_queryset(queryset)
return queryset return queryset
def attach_assigned_users(queryset, as_field="assigned_users_attr"):
"""Attach assigned users as json column to each object of the queryset.
:param queryset: A Django user stories queryset object.
:param as_field: Attach assigned as an attribute with this name.
:return: Queryset object with the additional `as_field` field.
"""
model = queryset.model
sql = """SELECT "userstories_userstory_assigned_users"."user_id" AS "user_id"
FROM "userstories_userstory_assigned_users"
WHERE "userstories_userstory_assigned_users"."userstory_id" = {tbl}.id"""
sql = sql.format(tbl=model._meta.db_table)
queryset = queryset.extra(select={as_field: sql})
return queryset

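A hypothetical usage sketch for the new helper (module path, field name, and ids are assumptions, not part of the patch); it mirrors how the other attach_* utilities above are chained onto a queryset:

    from taiga.projects.userstories import models, utils  # assumed import path

    qs = models.UserStory.objects.filter(project_id=1)     # hypothetical project id
    qs = utils.attach_assigned_users(qs, as_field="assigned_users_attr")
    # each row now carries the extra SELECT under the "assigned_users_attr" attribute
    us = qs.first()
    value = getattr(us, "assigned_users_attr", None) if us else None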
View File

@ -345,6 +345,8 @@ class UserStorySerializer(CustomAttributesValuesWebhookSerializerMixin, serializ
created_date = Field() created_date = Field()
modified_date = Field() modified_date = Field()
finish_date = Field() finish_date = Field()
due_date = Field()
due_date_reason = Field()
subject = Field() subject = Field()
client_requirement = Field() client_requirement = Field()
team_requirement = Field() team_requirement = Field()
@ -359,6 +361,7 @@ class UserStorySerializer(CustomAttributesValuesWebhookSerializerMixin, serializ
permalink = serializers.SerializerMethodField("get_permalink") permalink = serializers.SerializerMethodField("get_permalink")
owner = UserSerializer() owner = UserSerializer()
assigned_to = UserSerializer() assigned_to = UserSerializer()
assigned_users = MethodField()
points = MethodField() points = MethodField()
status = UserStoryStatusSerializer() status = UserStoryStatusSerializer()
milestone = MilestoneSerializer() milestone = MilestoneSerializer()
@ -369,6 +372,13 @@ class UserStorySerializer(CustomAttributesValuesWebhookSerializerMixin, serializ
def custom_attributes_queryset(self, project): def custom_attributes_queryset(self, project):
return project.userstorycustomattributes.all() return project.userstorycustomattributes.all()
def get_assigned_users(self, obj):
"""Get the assigned of an object.
:return: User queryset object representing the assigned users
"""
return [user.id for user in obj.assigned_users.all()]
def get_watchers(self, obj): def get_watchers(self, obj):
return list(obj.get_watchers().values_list("id", flat=True)) return list(obj.get_watchers().values_list("id", flat=True))
@ -386,6 +396,8 @@ class TaskSerializer(CustomAttributesValuesWebhookSerializerMixin, serializers.L
created_date = Field() created_date = Field()
modified_date = Field() modified_date = Field()
finished_date = Field() finished_date = Field()
due_date = Field()
due_date_reason = Field()
subject = Field() subject = Field()
us_order = Field() us_order = Field()
taskboard_order = Field() taskboard_order = Field()
@ -424,6 +436,8 @@ class IssueSerializer(CustomAttributesValuesWebhookSerializerMixin, serializers.
created_date = Field() created_date = Field()
modified_date = Field() modified_date = Field()
finished_date = Field() finished_date = Field()
due_date = Field()
due_date_reason = Field()
subject = Field() subject = Field()
external_reference = Field() external_reference = Field()
watchers = MethodField() watchers = MethodField()

View File

@ -292,6 +292,17 @@ class UserStoryFactory(Factory):
status = factory.SubFactory("tests.factories.UserStoryStatusFactory") status = factory.SubFactory("tests.factories.UserStoryStatusFactory")
milestone = factory.SubFactory("tests.factories.MilestoneFactory") milestone = factory.SubFactory("tests.factories.MilestoneFactory")
tags = factory.Faker("words") tags = factory.Faker("words")
due_date = factory.LazyAttribute(lambda o: date.today() + timedelta(days=7))
due_date_reason = factory.Faker("words")
@factory.post_generation
def assigned_users(self, create, users_list, **kwargs):
if not create:
return
if users_list:
for user in users_list:
self.assigned_users.add(user)
class TaskFactory(Factory): class TaskFactory(Factory):
@ -308,6 +319,8 @@ class TaskFactory(Factory):
milestone = factory.SubFactory("tests.factories.MilestoneFactory") milestone = factory.SubFactory("tests.factories.MilestoneFactory")
user_story = factory.SubFactory("tests.factories.UserStoryFactory") user_story = factory.SubFactory("tests.factories.UserStoryFactory")
tags = factory.Faker("words") tags = factory.Faker("words")
due_date = factory.LazyAttribute(lambda o: date.today() + timedelta(days=7))
due_date_reason = factory.Faker("words")
class IssueFactory(Factory): class IssueFactory(Factory):
@ -326,6 +339,8 @@ class IssueFactory(Factory):
type = factory.SubFactory("tests.factories.IssueTypeFactory") type = factory.SubFactory("tests.factories.IssueTypeFactory")
milestone = factory.SubFactory("tests.factories.MilestoneFactory") milestone = factory.SubFactory("tests.factories.MilestoneFactory")
tags = factory.Faker("words") tags = factory.Faker("words")
due_date = factory.LazyAttribute(lambda o: date.today() + timedelta(days=7))
due_date_reason = factory.Faker("words")
class WikiPageFactory(Factory): class WikiPageFactory(Factory):

View File

@ -591,9 +591,9 @@ def test_custom_fields_csv_generation():
data.seek(0) data.seek(0)
reader = csv.reader(data) reader = csv.reader(data)
row = next(reader) row = next(reader)
assert row[23] == attr.name assert row[25] == attr.name
row = next(reader) row = next(reader)
assert row[23] == "val1" assert row[25] == "val1"
def test_api_validator_assigned_to_when_update_issues(client): def test_api_validator_assigned_to_when_update_issues(client):

View File

@ -574,9 +574,9 @@ def test_custom_fields_csv_generation():
data.seek(0) data.seek(0)
reader = csv.reader(data) reader = csv.reader(data)
row = next(reader) row = next(reader)
assert row[24] == attr.name assert row[26] == attr.name
row = next(reader) row = next(reader)
assert row[24] == "val1" assert row[26] == "val1"
def test_get_tasks_including_attachments(client): def test_get_tasks_including_attachments(client):

View File

@ -17,6 +17,9 @@
# You should have received a copy of the GNU Affero General Public License # You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>. # along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytz
from datetime import datetime, timedelta
import pytest import pytest
from .. import factories from .. import factories
@ -445,10 +448,12 @@ def test_delete_membership_timeline():
def test_comment_user_story_timeline(): def test_comment_user_story_timeline():
user_story = factories.UserStoryFactory.create(subject="test us timeline") user_story = factories.UserStoryFactory.create(subject="test us timeline")
history_services.take_snapshot(user_story, user=user_story.owner) history_services.take_snapshot(user_story, user=user_story.owner)
history_services.take_snapshot(user_story, user=user_story.owner, comment="testing comment") history_services.take_snapshot(user_story, user=user_story.owner,
comment="testing comment")
project_timeline = service.get_project_timeline(user_story.project) project_timeline = service.get_project_timeline(user_story.project)
assert project_timeline[0].event_type == "userstories.userstory.change" assert project_timeline[0].event_type == "userstories.userstory.change"
assert project_timeline[0].data["userstory"]["subject"] == "test us timeline" assert project_timeline[0].data["userstory"]["subject"] \
== "test us timeline"
assert project_timeline[0].data["comment"] == "testing comment" assert project_timeline[0].data["comment"] == "testing comment"
@ -462,13 +467,59 @@ def test_owner_user_story_timeline():
def test_assigned_to_user_story_timeline(): def test_assigned_to_user_story_timeline():
membership = factories.MembershipFactory.create() membership = factories.MembershipFactory.create()
user_story = factories.UserStoryFactory.create(subject="test us timeline", assigned_to=membership.user, project=membership.project) user_story = factories.UserStoryFactory.create(subject="test us timeline",
assigned_to=membership.user,
project=membership.project)
history_services.take_snapshot(user_story, user=user_story.owner) history_services.take_snapshot(user_story, user=user_story.owner)
user_timeline = service.get_profile_timeline(user_story.assigned_to) user_timeline = service.get_profile_timeline(user_story.assigned_to)
assert user_timeline[0].event_type == "userstories.userstory.create" assert user_timeline[0].event_type == "userstories.userstory.create"
assert user_timeline[0].data["userstory"]["subject"] == "test us timeline" assert user_timeline[0].data["userstory"]["subject"] == "test us timeline"
def test_due_date_user_story_timeline():
initial_due_date = datetime.now(pytz.utc) + timedelta(days=1)
membership = factories.MembershipFactory.create()
user_story = factories.UserStoryFactory.create(subject="test us timeline",
due_date=initial_due_date,
project=membership.project)
history_services.take_snapshot(user_story, user=user_story.owner)
new_due_date = datetime.now(pytz.utc) + timedelta(days=3)
user_story.due_date = new_due_date
user_story.save()
history_services.take_snapshot(user_story, user=user_story.owner)
user_timeline = service.get_profile_timeline(user_story.owner)
assert user_timeline[0].event_type == "userstories.userstory.change"
assert user_timeline[0].data["values_diff"]['due_date'] == [str(initial_due_date.date()),
str(new_due_date.date())]
def test_assigned_users_user_story_timeline():
membership = factories.MembershipFactory.create()
user_story = factories.UserStoryFactory.create(subject="test us timeline",
project=membership.project)
history_services.take_snapshot(user_story, user=user_story.owner)
user_timeline = service.get_profile_timeline(user_story.owner)
assert user_timeline[0].event_type == "userstories.userstory.create"
assert user_timeline[0].data["userstory"]["subject"] == "test us timeline"
user_story.assigned_to = membership.user
user_story.assigned_users = (membership.user,)
user_story.save()
history_services.take_snapshot(user_story, user=user_story.owner)
user_timeline = service.get_profile_timeline(user_story.owner)
assert user_timeline[0].event_type == "userstories.userstory.change"
assert "assigned_to" not in user_timeline[0].data["values_diff"].keys()
assert user_timeline[0].data["values_diff"]['assigned_users'] == \
[None, membership.user.username]
def test_user_data_for_non_system_users(): def test_user_data_for_non_system_users():
user_story = factories.UserStoryFactory.create(subject="test us timeline") user_story = factories.UserStoryFactory.create(subject="test us timeline")
history_services.take_snapshot(user_story, user=user_story.owner) history_services.take_snapshot(user_story, user=user_story.owner)
@ -497,8 +548,10 @@ def test_user_data_for_unactived_users():
serialized_obj.data["data"]["user"]["is_profile_visible"] = False serialized_obj.data["data"]["user"]["is_profile_visible"] = False
serialized_obj.data["data"]["user"]["username"] = "deleted-user" serialized_obj.data["data"]["user"]["username"] = "deleted-user"
def test_timeline_error_use_member_ids_instead_of_memberships_ids(): def test_timeline_error_use_member_ids_instead_of_memberships_ids():
user_story = factories.UserStoryFactory.create(subject="test error use member ids instead of " user_story = factories.UserStoryFactory.create(
subject="test error use member ids instead of "
"memberships ids") "memberships ids")
member_user = user_story.owner member_user = user_story.owner

View File

@ -69,6 +69,44 @@ def test_update_userstories_order_in_bulk():
models.UserStory) models.UserStory)
def test_create_userstory_with_assign_to(client):
user = f.UserFactory.create()
user_watcher = f.UserFactory.create()
project = f.ProjectFactory.create(owner=user)
f.MembershipFactory.create(project=project, user=user, is_admin=True)
f.MembershipFactory.create(project=project, user=user_watcher,
is_admin=True)
url = reverse("userstories-list")
data = {"subject": "Test user story", "project": project.id,
"assigned_to": user.id}
client.login(user)
response = client.json.post(url, json.dumps(data))
assert response.status_code == 201
assert response.data["assigned_to"] == user.id
def test_create_userstory_with_assigned_users(client):
user = f.UserFactory.create()
user_watcher = f.UserFactory.create()
project = f.ProjectFactory.create(owner=user)
f.MembershipFactory.create(project=project, user=user, is_admin=True)
f.MembershipFactory.create(project=project, user=user_watcher,
is_admin=True)
url = reverse("userstories-list")
data = {"subject": "Test user story", "project": project.id,
"assigned_users": [user.id, user_watcher.id]}
client.login(user)
json_data = json.dumps(data)
response = client.json.post(url, json_data)
assert response.status_code == 201
assert response.data["assigned_users"] == set([user.id, user_watcher.id])
def test_create_userstory_with_watchers(client): def test_create_userstory_with_watchers(client):
user = f.UserFactory.create() user = f.UserFactory.create()
user_watcher = f.UserFactory.create() user_watcher = f.UserFactory.create()
@ -883,13 +921,16 @@ def test_get_valid_csv(client):
url = reverse("userstories-csv") url = reverse("userstories-csv")
project = f.ProjectFactory.create(userstories_csv_uuid=uuid.uuid4().hex) project = f.ProjectFactory.create(userstories_csv_uuid=uuid.uuid4().hex)
response = client.get("{}?uuid={}".format(url, project.userstories_csv_uuid)) response = client.get(
"{}?uuid={}".format(url, project.userstories_csv_uuid))
assert response.status_code == 200 assert response.status_code == 200
def test_custom_fields_csv_generation(): def test_custom_fields_csv_generation():
project = f.ProjectFactory.create(userstories_csv_uuid=uuid.uuid4().hex) project = f.ProjectFactory.create(userstories_csv_uuid=uuid.uuid4().hex)
attr = f.UserStoryCustomAttributeFactory.create(project=project, name="attr1", description="desc") attr = f.UserStoryCustomAttributeFactory.create(project=project,
name="attr1",
description="desc")
us = f.UserStoryFactory.create(project=project) us = f.UserStoryFactory.create(project=project)
attr_values = us.custom_attributes_values attr_values = us.custom_attributes_values
attr_values.attributes_values = {str(attr.id): "val1"} attr_values.attributes_values = {str(attr.id): "val1"}
@ -899,17 +940,20 @@ def test_custom_fields_csv_generation():
data.seek(0) data.seek(0)
reader = csv.reader(data) reader = csv.reader(data)
row = next(reader) row = next(reader)
assert row[28] == attr.name assert row.pop() == attr.name
row = next(reader) row = next(reader)
assert row[28] == "val1" assert row.pop() == "val1"
def test_update_userstory_respecting_watchers(client): def test_update_userstory_respecting_watchers(client):
watching_user = f.create_user() watching_user = f.create_user()
project = f.ProjectFactory.create() project = f.ProjectFactory.create()
us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project) us = f.UserStoryFactory.create(project=project, status__project=project,
milestone__project=project)
us.add_watcher(watching_user) us.add_watcher(watching_user)
f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True) f.MembershipFactory.create(project=us.project, user=us.owner,
is_admin=True)
f.MembershipFactory.create(project=us.project, user=watching_user) f.MembershipFactory.create(project=us.project, user=watching_user)
client.login(user=us.owner) client.login(user=us.owner)
@ -925,8 +969,10 @@ def test_update_userstory_respecting_watchers(client):
def test_update_userstory_update_watchers(client): def test_update_userstory_update_watchers(client):
watching_user = f.create_user() watching_user = f.create_user()
project = f.ProjectFactory.create() project = f.ProjectFactory.create()
us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project) us = f.UserStoryFactory.create(project=project, status__project=project,
f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True) milestone__project=project)
f.MembershipFactory.create(project=us.project, user=us.owner,
is_admin=True)
f.MembershipFactory.create(project=us.project, user=watching_user) f.MembershipFactory.create(project=us.project, user=watching_user)
client.login(user=us.owner) client.login(user=us.owner)
@ -943,9 +989,11 @@ def test_update_userstory_update_watchers(client):
def test_update_userstory_remove_watchers(client): def test_update_userstory_remove_watchers(client):
watching_user = f.create_user() watching_user = f.create_user()
project = f.ProjectFactory.create() project = f.ProjectFactory.create()
us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project) us = f.UserStoryFactory.create(project=project, status__project=project,
milestone__project=project)
us.add_watcher(watching_user) us.add_watcher(watching_user)
f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True) f.MembershipFactory.create(project=us.project, user=us.owner,
is_admin=True)
f.MembershipFactory.create(project=us.project, user=watching_user) f.MembershipFactory.create(project=us.project, user=watching_user)
client.login(user=us.owner) client.login(user=us.owner)
@ -961,8 +1009,10 @@ def test_update_userstory_remove_watchers(client):
def test_update_userstory_update_tribe_gig(client): def test_update_userstory_update_tribe_gig(client):
project = f.ProjectFactory.create() project = f.ProjectFactory.create()
us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project) us = f.UserStoryFactory.create(project=project, status__project=project,
f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True) milestone__project=project)
f.MembershipFactory.create(project=us.project, user=us.owner,
is_admin=True)
url = reverse("userstories-detail", kwargs={"pk": us.pk}) url = reverse("userstories-detail", kwargs={"pk": us.pk})
data = { data = {

View File

@ -82,6 +82,38 @@ def test_webhooks_when_update_user_story(settings):
assert data["change"]["diff"]["subject"]["from"] != data["data"]["subject"] assert data["change"]["diff"]["subject"]["from"] != data["data"]["subject"]
def test_webhooks_when_update_assigned_users_user_story(settings):
settings.WEBHOOKS_ENABLED = True
project = f.ProjectFactory()
f.WebhookFactory.create(project=project)
f.WebhookFactory.create(project=project)
obj = f.UserStoryFactory.create(project=project)
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner)
assert send_request_mock.call_count == 2
user = f.create_user()
obj.assigned_users.add(user)
obj.save()
with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
services.take_snapshot(obj, user=obj.owner,)
assert send_request_mock.call_count == 2
(webhook_id, url, key, data) = send_request_mock.call_args[0]
assert data["action"] == "change"
assert data["type"] == "userstory"
assert data["by"]["id"] == obj.owner.id
assert len(data["data"]["assigned_users"]) == \
obj.assigned_users.count()
assert data["data"]["assigned_users"] == [user.id]
assert not data["change"]["diff"]["assigned_users"]["from"]
assert data["change"]["diff"]["assigned_users"]["to"] == user.username
def test_webhooks_when_delete_user_story(settings): def test_webhooks_when_delete_user_story(settings):
settings.WEBHOOKS_ENABLED = True settings.WEBHOOKS_ENABLED = True
project = f.ProjectFactory() project = f.ProjectFactory()

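A hypothetical payload fragment consistent with the assertions in test_webhooks_when_update_assigned_users_user_story above (ids and username are invented):

    payload_fragment = {
        "action": "change",
        "type": "userstory",
        "by": {"id": 1},
        "data": {"assigned_users": [42]},  # ids of the current assignees
        "change": {"diff": {"assigned_users": {"from": None, "to": "new-assignee"}}},
    }
    assert payload_fragment["data"]["assigned_users"] == [42]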
View File

@ -0,0 +1,22 @@
import datetime as dt
from unittest import mock
import pytest
from django.utils import timezone
from taiga.projects.due_dates.serializers import DueDateSerializerMixin
@pytest.mark.parametrize('due_date, is_closed, expected', [
(None, False, 'not_set'),
(dt.date(2100, 1, 1), True, 'no_longer_applicable'),
(dt.date(2100, 12, 31), False, 'set'),
(dt.date(2000, 1, 1), False, 'past_due'),
(timezone.now().date(), False, 'due_soon'),
])
def test_due_date_status(due_date, is_closed, expected):
serializer = DueDateSerializerMixin()
obj_status = mock.MagicMock(is_closed=is_closed)
obj = mock.MagicMock(due_date=due_date, status=obj_status)
status = serializer.get_due_date_status(obj)
assert status == expected
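
The parametrized cases above pin down five states; a minimal standalone sketch of rules that would satisfy them is shown below. This is not the actual DueDateSerializerMixin code, and the 14-day "due soon" threshold is an assumption:

    import datetime as dt

    DUE_SOON_DAYS = 14  # assumed threshold

    def due_date_status(due_date, is_closed, today=None):
        today = today or dt.date.today()
        if due_date is None:
            return "not_set"
        if is_closed:
            return "no_longer_applicable"
        if due_date < today:
            return "past_due"
        if (due_date - today).days <= DUE_SOON_DAYS:
            return "due_soon"
        return "set"

    assert due_date_status(None, False) == "not_set"
    assert due_date_status(dt.date(2000, 1, 1), False) == "past_due"
    assert due_date_status(dt.date.today(), False) == "due_soon"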