+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as
+# published by the Free Software Foundation, either version 3 of the
+# License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import datetime as dt
+
+from django.utils import timezone
+
+from taiga.base.api import serializers
+from taiga.base.fields import Field, MethodField
+
+
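+# Mixin that exposes due_date, due_date_reason and a computed due_date_status
+# for serializers of models using DueDateMixin (user stories, tasks and issues).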
+class DueDateSerializerMixin(serializers.LightSerializer):
+ due_date = Field()
+ due_date_reason = Field()
+ due_date_status = MethodField()
+
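+    # Days before the due date within which an open item is reported as "due_soon"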
+ THRESHOLD = 14
+
+ def get_due_date_status(self, obj):
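+        """Return the due date status: 'not_set', 'no_longer_applicable', 'past_due', 'due_soon' or 'set'."""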
+ if obj.due_date is None:
+ return 'not_set'
+ elif obj.status and obj.status.is_closed:
+ return 'no_longer_applicable'
+ elif timezone.now().date() > obj.due_date:
+ return 'past_due'
+ elif (timezone.now().date() + dt.timedelta(
+ days=self.THRESHOLD)) >= obj.due_date:
+ return 'due_soon'
+ else:
+ return 'set'
diff --git a/taiga/projects/history/freeze_impl.py b/taiga/projects/history/freeze_impl.py
index 1b5738b2..85c47d4f 100644
--- a/taiga/projects/history/freeze_impl.py
+++ b/taiga/projects/history/freeze_impl.py
@@ -92,11 +92,15 @@ def _common_users_values(diff):
users.update(diff["owner"])
if "assigned_to" in diff:
users.update(diff["assigned_to"])
+ if "assigned_users" in diff:
+        for usrs_ids in diff["assigned_users"]:
+            if usrs_ids:
+                users.update(usrs_ids)
if users:
values["users"] = _get_users_values(users)
return values
+
def project_values(diff):
values = _common_users_values(diff)
return values
@@ -332,6 +336,12 @@ def userstory_freezer(us) -> dict:
for rp in rpqsd:
points[str(rp.role_id)] = rp.points_id
+ assigned_users = [u.id for u in us.assigned_users.all()]
+    # Due to the multiple-assignment migration, new snapshots fall back to
+    # a list containing the 'assigned_to' value when no assigned users exist
+ if us.assigned_to_id and not assigned_users:
+ assigned_users = [us.assigned_to_id]
+
snapshot = {
"ref": us.ref,
"owner": us.owner_id,
@@ -345,6 +355,7 @@ def userstory_freezer(us) -> dict:
"description": us.description,
"description_html": mdrender(us.project, us.description),
"assigned_to": us.assigned_to_id,
+ "assigned_users": assigned_users,
"milestone": us.milestone_id,
"client_requirement": us.client_requirement,
"team_requirement": us.team_requirement,
@@ -357,6 +368,7 @@ def userstory_freezer(us) -> dict:
"blocked_note_html": mdrender(us.project, us.blocked_note),
"custom_attributes": extract_user_story_custom_attributes(us),
"tribe_gig": us.tribe_gig,
+ "due_date": str(us.due_date) if us.due_date else None
}
return snapshot
@@ -381,6 +393,7 @@ def issue_freezer(issue) -> dict:
"blocked_note": issue.blocked_note,
"blocked_note_html": mdrender(issue.project, issue.blocked_note),
"custom_attributes": extract_issue_custom_attributes(issue),
+ "due_date": str(issue.due_date) if issue.due_date else None
}
return snapshot
@@ -406,6 +419,7 @@ def task_freezer(task) -> dict:
"blocked_note": task.blocked_note,
"blocked_note_html": mdrender(task.project, task.blocked_note),
"custom_attributes": extract_task_custom_attributes(task),
+ "due_date": str(task.due_date) if task.due_date else None
}
return snapshot
diff --git a/taiga/projects/history/models.py b/taiga/projects/history/models.py
index 88fd3c57..d24806f6 100644
--- a/taiga/projects/history/models.py
+++ b/taiga/projects/history/models.py
@@ -176,6 +176,15 @@ class HistoryEntry(models.Model):
(key, value) = resolve_diff_value(key)
elif key in users_keys:
value = [resolve_value("users", x) for x in self.diff[key]]
+ elif key == "assigned_users":
+ diff_in, diff_out = self.diff[key]
+ value_in = None
+ value_out = None
+ if diff_in:
+ value_in = ", ".join([resolve_value("users", x) for x in diff_in])
+ if diff_out:
+ value_out = ", ".join([resolve_value("users", x) for x in diff_out])
+ value = [value_in, value_out]
elif key == "points":
points = {}
diff --git a/taiga/projects/history/services.py b/taiga/projects/history/services.py
index 27be7cae..4f3487fc 100644
--- a/taiga/projects/history/services.py
+++ b/taiga/projects/history/services.py
@@ -81,10 +81,15 @@ _values_impl_map = {}
# this fields are marked as hidden).
_not_important_fields = {
"epics.epic": frozenset(["epics_order", "user_stories"]),
- "userstories.userstory": frozenset(["backlog_order", "sprint_order", "kanban_order"]),
+ "userstories.userstory": frozenset(
+ ["backlog_order", "sprint_order", "kanban_order"]),
"tasks.task": frozenset(["us_order", "taskboard_order"]),
}
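+
+# Deprecated fields are still stored in snapshots but excluded when computing
+# new diffs (see get_excluded_fields and make_diff)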
+_deprecated_fields = {
+ "userstories.userstory": frozenset(["assigned_to"]),
+}
+
log = logging.getLogger("taiga.history")
@@ -191,7 +196,8 @@ def freeze_model_instance(obj: object) -> FrozenObj:
key = make_key_from_model_object(obj)
impl_fn = _freeze_impl_map[typename]
snapshot = impl_fn(obj)
- assert isinstance(snapshot, dict), "freeze handlers should return always a dict"
+ assert isinstance(snapshot, dict), \
+ "freeze handlers should return always a dict"
return FrozenObj(key, snapshot)
@@ -216,12 +222,46 @@ def is_hidden_snapshot(obj: FrozenDiff) -> bool:
return False
-def make_diff(oldobj: FrozenObj, newobj: FrozenObj) -> FrozenDiff:
+def get_excluded_fields(typename: str) -> tuple:
+ """
+    Get excluded and deprecated fields to avoid in the diff.
+ """
+ return _deprecated_fields.get(typename, ())
+
+
+def migrate_userstory_diff(obj: FrozenObj) -> FrozenObj:
+    # Due to the multiple-assignment migration, old snapshots get an
+    # 'assigned_users' entry built from their 'assigned_to' value
+ if 'assigned_users' not in obj.snapshot.keys():
+ snapshot = deepcopy(obj.snapshot)
+ snapshot['assigned_users'] = [obj.snapshot['assigned_to']]
+
+ obj = FrozenObj(obj.key, snapshot)
+
+ return obj
+
+
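+# Snapshot migrations, by type name, applied to old snapshots before diffing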
+_migrations = {"userstories.userstory": migrate_userstory_diff}
+
+
+def migrate_to_last_version(typename: str, obj: FrozenObj) -> FrozenObj:
+ """""
+ Adapt old snapshots to the last format in order to generate correct diffs.
+ :param typename:
+ :param obj:
+ :return:
+ """
+ return _migrations.get(typename, lambda x: x)(obj)
+
+
+def make_diff(oldobj: FrozenObj, newobj: FrozenObj,
+ excluded_keys: tuple = ()) -> FrozenDiff:
"""
Compute a diff between two frozen objects.
"""
- assert isinstance(newobj, FrozenObj), "newobj parameter should be instance of FrozenObj"
+ assert isinstance(newobj, FrozenObj), \
+ "newobj parameter should be instance of FrozenObj"
if oldobj is None:
return FrozenDiff(newobj.key, {}, newobj.snapshot)
@@ -229,7 +269,7 @@ def make_diff(oldobj: FrozenObj, newobj: FrozenObj) -> FrozenDiff:
first = oldobj.snapshot
second = newobj.snapshot
- diff = make_diff_from_dicts(first, second)
+ diff = make_diff_from_dicts(first, second, None, excluded_keys)
return FrozenDiff(newobj.key, diff, newobj.snapshot)
@@ -242,7 +282,8 @@ def make_diff_values(typename: str, fdiff: FrozenDiff) -> dict:
"""
if typename not in _values_impl_map:
- log.warning("No implementation found of '{}' for values.".format(typename))
+ log.warning(
+ "No implementation found of '{}' for values.".format(typename))
return {}
impl_fn = _values_impl_map[typename]
@@ -294,10 +335,12 @@ def get_modified_fields(obj: object, last_modifications):
"""
key = make_key_from_model_object(obj)
entry_model = apps.get_model("history", "HistoryEntry")
- history_entries = (entry_model.objects
- .filter(key=key)
- .order_by("-created_at")
- .values_list("diff", flat=True)[0:last_modifications])
+ history_entries = (
+ entry_model.objects.filter(key=key)
+ .order_by("-created_at")
+ .values_list("diff",
+ flat=True)[0:last_modifications]
+ )
modified_fields = []
for history_entry in history_entries:
@@ -307,7 +350,8 @@ def get_modified_fields(obj: object, last_modifications):
@tx.atomic
-def take_snapshot(obj: object, *, comment: str="", user=None, delete: bool=False):
+def take_snapshot(obj: object, *, comment: str="", user=None,
+ delete: bool=False):
"""
Given any model instance with registred content type,
create new history entry of "change" type.
@@ -323,6 +367,10 @@ def take_snapshot(obj: object, *, comment: str="", user=None, delete: bool=False
new_fobj = freeze_model_instance(obj)
old_fobj, need_real_snapshot = get_last_snapshot_for_key(key)
+    # migrate the old snapshot to the latest schema before computing the diff
+ if old_fobj:
+ old_fobj = migrate_to_last_version(typename, old_fobj)
+
entry_model = apps.get_model("history", "HistoryEntry")
user_id = None if user is None else user.id
user_name = "" if user is None else user.get_full_name()
@@ -338,11 +386,15 @@ def take_snapshot(obj: object, *, comment: str="", user=None, delete: bool=False
else:
raise RuntimeError("Unexpected condition")
- fdiff = make_diff(old_fobj, new_fobj)
+ excluded_fields = get_excluded_fields(typename)
+
+ fdiff = make_diff(old_fobj, new_fobj, excluded_fields)
# If diff and comment are empty, do
# not create empty history entry
- if (not fdiff.diff and not comment and old_fobj is not None and entry_type != HistoryType.delete):
+ if (not fdiff.diff and
+ not comment and old_fobj is not None and
+ entry_type != HistoryType.delete):
return None
fvals = make_diff_values(typename, fdiff)
@@ -371,7 +423,8 @@ def take_snapshot(obj: object, *, comment: str="", user=None, delete: bool=False
# High level query api
-def get_history_queryset_by_model_instance(obj: object, types=(HistoryType.change,),
+def get_history_queryset_by_model_instance(obj: object,
+ types=(HistoryType.change,),
include_hidden=False):
"""
Get one page of history for specified object.
@@ -391,16 +444,18 @@ def prefetch_owners_in_history_queryset(qs):
users = get_user_model().objects.filter(id__in=user_ids)
users_by_id = {u.id: u for u in users}
for history_entry in qs:
- history_entry.prefetch_owner(users_by_id.get(history_entry.user["pk"], None))
+ history_entry.prefetch_owner(users_by_id.get(history_entry.user["pk"],
+ None))
return qs
# Freeze & value register
register_freeze_implementation("projects.project", project_freezer)
-register_freeze_implementation("milestones.milestone", milestone_freezer,)
+register_freeze_implementation("milestones.milestone", milestone_freezer)
register_freeze_implementation("epics.epic", epic_freezer)
-register_freeze_implementation("epics.relateduserstory", epic_related_userstory_freezer)
+register_freeze_implementation("epics.relateduserstory",
+ epic_related_userstory_freezer)
register_freeze_implementation("userstories.userstory", userstory_freezer)
register_freeze_implementation("issues.issue", issue_freezer)
register_freeze_implementation("tasks.task", task_freezer)
@@ -409,7 +464,8 @@ register_freeze_implementation("wiki.wikipage", wikipage_freezer)
register_values_implementation("projects.project", project_values)
register_values_implementation("milestones.milestone", milestone_values)
register_values_implementation("epics.epic", epic_values)
-register_values_implementation("epics.relateduserstory", epic_related_userstory_values)
+register_values_implementation("epics.relateduserstory",
+ epic_related_userstory_values)
register_values_implementation("userstories.userstory", userstory_values)
register_values_implementation("issues.issue", issue_values)
register_values_implementation("tasks.task", task_values)
diff --git a/taiga/projects/history/templates/emails/includes/fields_diff-html.jinja b/taiga/projects/history/templates/emails/includes/fields_diff-html.jinja
index 9d3473e7..2e6138e7 100644
--- a/taiga/projects/history/templates/emails/includes/fields_diff-html.jinja
+++ b/taiga/projects/history/templates/emails/includes/fields_diff-html.jinja
@@ -149,6 +149,60 @@
{% endif %}
+    {# DUE DATE #}
+ {% elif field_name == "due_date" %}
+
+
+ {{ verbose_name(obj_class, field_name) }}
+ |
+
+ {% if values.0 != None and values.0 != "" %}
+ {{ _("from") }}
+ {{ values.0|date }}
+ {% else %}
+ {{ _("from") }}
+ {{ _("Not set") }}
+ {% endif %}
+ |
+
+
+
+ {% if values.1 != None and values.1 != "" %}
+ {{ _("to") }}
+ {{ values.1|date }}
+ {% else %}
+ {{ _("to") }}
+ {{ _("Not set") }}
+ {% endif %}
+ |
+
+    {# ASSIGNED USERS #}
+    {% elif field_name == "assigned_users" %}
+      <tr>
+        <td>
+          {{ verbose_name(obj_class, field_name) }}
+        </td>
+        <td>
+          {% if values.0 != None and values.0 != "" %}
+            {{ _("from") }}
+            {{ values.0 }}
+          {% else %}
+            {{ _("from") }}
+            {{ _("Unassigned") }}
+          {% endif %}
+        </td>
+        <td>
+          {% if values.1 != None and values.1 != "" %}
+            {{ _("to") }}
+            {{ values.1 }}
+          {% else %}
+            {{ _("to") }}
+            {{ _("Unassigned") }}
+          {% endif %}
+        </td>
+      </tr>
+
{# * #}
{% else %}
diff --git a/taiga/projects/issues/migrations/0008_add_due_date.py b/taiga/projects/issues/migrations/0008_add_due_date.py
new file mode 100644
index 00000000..96a95c41
--- /dev/null
+++ b/taiga/projects/issues/migrations/0008_add_due_date.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2018-04-09 09:06
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('issues', '0007_auto_20160614_1201'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='issue',
+ name='due_date',
+ field=models.DateField(blank=True, default=None, null=True, verbose_name='due date'),
+ ),
+ migrations.AddField(
+ model_name='issue',
+ name='due_date_reason',
+ field=models.TextField(blank=True, default='', verbose_name='reason for the due date'),
+ ),
+ ]
diff --git a/taiga/projects/issues/models.py b/taiga/projects/issues/models.py
index c2f3696b..2b1b7f49 100644
--- a/taiga/projects/issues/models.py
+++ b/taiga/projects/issues/models.py
@@ -24,13 +24,14 @@ from django.utils import timezone
from django.dispatch import receiver
from django.utils.translation import ugettext_lazy as _
+from taiga.projects.due_dates.models import DueDateMixin
from taiga.projects.occ import OCCModelMixin
from taiga.projects.notifications.mixins import WatchedModelMixin
from taiga.projects.mixins.blocked import BlockedMixin
from taiga.projects.tagging.models import TaggedMixin
-class Issue(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, models.Model):
+class Issue(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, DueDateMixin, models.Model):
ref = models.BigIntegerField(db_index=True, null=True, blank=True, default=None,
verbose_name=_("ref"))
owner = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, default=None,
diff --git a/taiga/projects/issues/serializers.py b/taiga/projects/issues/serializers.py
index 80057dcc..f27fb973 100644
--- a/taiga/projects/issues/serializers.py
+++ b/taiga/projects/issues/serializers.py
@@ -21,6 +21,7 @@ from taiga.base.fields import Field, MethodField
from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
+from taiga.projects.due_dates.serializers import DueDateSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
@@ -33,7 +34,8 @@ from taiga.projects.votes.mixins.serializers import VoteResourceSerializerMixin
class IssueListSerializer(VoteResourceSerializerMixin, WatchedResourceSerializer,
OwnerExtraInfoSerializerMixin, AssignedToExtraInfoSerializerMixin,
StatusExtraInfoSerializerMixin, ProjectExtraInfoSerializerMixin,
- TaggedInProjectResourceSerializer, serializers.LightSerializer):
+ DueDateSerializerMixin, TaggedInProjectResourceSerializer,
+ serializers.LightSerializer):
id = Field()
ref = Field()
severity = Field(attr="severity_id")
diff --git a/taiga/projects/issues/services.py b/taiga/projects/issues/services.py
index dbcb0d4e..1a59bcb7 100644
--- a/taiga/projects/issues/services.py
+++ b/taiga/projects/issues/services.py
@@ -82,7 +82,8 @@ def issues_to_csv(project, queryset):
"sprint_estimated_finish", "owner", "owner_full_name", "assigned_to",
"assigned_to_full_name", "status", "severity", "priority", "type",
"is_closed", "attachments", "external_reference", "tags", "watchers",
- "voters", "created_date", "modified_date", "finished_date"]
+ "voters", "created_date", "modified_date", "finished_date", "due_date",
+ "due_date_reason"]
custom_attrs = project.issuecustomattributes.all()
for custom_attr in custom_attrs:
@@ -125,6 +126,8 @@ def issues_to_csv(project, queryset):
"created_date": issue.created_date,
"modified_date": issue.modified_date,
"finished_date": issue.finished_date,
+ "due_date": issue.due_date,
+ "due_date_reason": issue.due_date_reason,
}
for custom_attr in custom_attrs:
diff --git a/taiga/projects/notifications/admin.py b/taiga/projects/notifications/admin.py
index 1da3c97c..5e258235 100644
--- a/taiga/projects/notifications/admin.py
+++ b/taiga/projects/notifications/admin.py
@@ -32,5 +32,5 @@ class WatchedInline(GenericTabularInline):
class NotifyPolicyInline(TabularInline):
model = models.NotifyPolicy
extra = 0
- readonly_fields = ("notify_level",)
+ readonly_fields = ("notify_level", "live_notify_level")
raw_id_fields = ["user"]
diff --git a/taiga/projects/notifications/migrations/0007_notifypolicy_live_notify_level.py b/taiga/projects/notifications/migrations/0007_notifypolicy_live_notify_level.py
new file mode 100644
index 00000000..73a106cc
--- /dev/null
+++ b/taiga/projects/notifications/migrations/0007_notifypolicy_live_notify_level.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.10.6 on 2017-03-31 13:03
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+import taiga.projects.notifications.choices
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('notifications', '0006_auto_20151103_0954'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='notifypolicy',
+ name='live_notify_level',
+ field=models.SmallIntegerField(choices=[(taiga.projects.notifications.choices.NotifyLevel(1), 'Involved'), (taiga.projects.notifications.choices.NotifyLevel(2), 'All'), (taiga.projects.notifications.choices.NotifyLevel(3), 'None')], default=taiga.projects.notifications.choices.NotifyLevel(1)),
+ ),
+ ]
diff --git a/taiga/projects/notifications/models.py b/taiga/projects/notifications/models.py
index a8dc3e96..dbf67353 100644
--- a/taiga/projects/notifications/models.py
+++ b/taiga/projects/notifications/models.py
@@ -36,6 +36,7 @@ class NotifyPolicy(models.Model):
project = models.ForeignKey("projects.Project", related_name="notify_policies")
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="notify_policies")
notify_level = models.SmallIntegerField(choices=NOTIFY_LEVEL_CHOICES)
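+    # Level used for live notifications (sent through taiga.events), independent of notify_level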
+ live_notify_level = models.SmallIntegerField(choices=NOTIFY_LEVEL_CHOICES, default=NotifyLevel.involved)
created_at = models.DateTimeField(default=timezone.now)
modified_at = models.DateTimeField()
diff --git a/taiga/projects/notifications/serializers.py b/taiga/projects/notifications/serializers.py
index 4387c19c..f55fa191 100644
--- a/taiga/projects/notifications/serializers.py
+++ b/taiga/projects/notifications/serializers.py
@@ -27,7 +27,7 @@ class NotifyPolicySerializer(serializers.ModelSerializer):
class Meta:
model = models.NotifyPolicy
- fields = ('id', 'project', 'project_name', 'notify_level')
+        fields = ('id', 'project', 'project_name', 'notify_level', 'live_notify_level')
def get_project_name(self, obj):
return obj.project.name
diff --git a/taiga/projects/notifications/services.py b/taiga/projects/notifications/services.py
index d0155eac..89227370 100644
--- a/taiga/projects/notifications/services.py
+++ b/taiga/projects/notifications/services.py
@@ -38,6 +38,7 @@ from taiga.projects.history.services import (make_key_from_model_object,
get_last_snapshot_for_key,
get_model_from_key)
from taiga.permissions.services import user_has_perm
+from taiga.events import events
from .models import HistoryChangeNotification, Watched
from .squashing import squash_history_entries
@@ -54,7 +55,8 @@ def notify_policy_exists(project, user) -> bool:
return qs.exists()
-def create_notify_policy(project, user, level=NotifyLevel.involved):
+def create_notify_policy(project, user, level=NotifyLevel.involved,
+ live_level=NotifyLevel.involved):
"""
Given a project and user, create notification policy for it.
"""
@@ -62,23 +64,30 @@ def create_notify_policy(project, user, level=NotifyLevel.involved):
try:
return model_cls.objects.create(project=project,
user=user,
- notify_level=level)
+ notify_level=level,
+ live_notify_level=live_level)
except IntegrityError as e:
- raise exc.IntegrityError(_("Notify exists for specified user and project")) from e
+ raise exc.IntegrityError(
+ _("Notify exists for specified user and project")) from e
-def create_notify_policy_if_not_exists(project, user, level=NotifyLevel.involved):
+def create_notify_policy_if_not_exists(project, user,
+ level=NotifyLevel.involved,
+ live_level=NotifyLevel.involved):
"""
Given a project and user, create notification policy for it.
"""
model_cls = apps.get_model("notifications", "NotifyPolicy")
try:
- result = model_cls.objects.get_or_create(project=project,
- user=user,
- defaults={"notify_level": level})
+ result = model_cls.objects.get_or_create(
+ project=project,
+ user=user,
+ defaults={"notify_level": level, "live_notify_level": live_level}
+ )
return result[0]
except IntegrityError as e:
- raise exc.IntegrityError(_("Notify exists for specified user and project")) from e
+ raise exc.IntegrityError(
+ _("Notify exists for specified user and project")) from e
def analize_object_for_watchers(obj: object, comment: str, user: object):
@@ -133,7 +142,7 @@ def _filter_notificable(user):
return user.is_active and not user.is_system
-def get_users_to_notify(obj, *, history=None, discard_users=None) -> list:
+def get_users_to_notify(obj, *, history=None, discard_users=None, live=False) -> list:
"""
Get filtered set of users to notify for specified
model instance and changer.
@@ -145,6 +154,8 @@ def get_users_to_notify(obj, *, history=None, discard_users=None) -> list:
def _check_level(project: object, user: object, levels: tuple) -> bool:
policy = project.cached_notify_policy_for_user(user)
+ if live:
+ return policy.live_notify_level in levels
return policy.notify_level in levels
_can_notify_hard = partial(_check_level, project,
@@ -221,7 +232,6 @@ def send_notifications(obj, *, history):
owner=owner,
project=obj.project,
history_type=history.type))
-
notification.updated_datetime = timezone.now()
notification.save()
notification.history_entries.add(history)
@@ -235,6 +245,10 @@ def send_notifications(obj, *, history):
if settings.CHANGE_NOTIFICATIONS_MIN_INTERVAL == 0:
send_sync_notifications(notification.id)
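+    # Emit live notifications through taiga.events to users whose live_notify_level allows them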
+ live_notify_users = get_users_to_notify(obj, history=history, discard_users=[notification.owner], live=True)
+ for user in live_notify_users:
+ events.emit_live_notification_for_model(obj, user, history)
+
@transaction.atomic
def send_sync_notifications(notification_id):
@@ -305,6 +319,7 @@ def send_sync_notifications(notification_id):
context["lang"] = user.lang or settings.LANGUAGE_CODE
email.send(user.email, context, headers=headers)
+
notification.delete()
@@ -416,7 +431,11 @@ def add_watcher(obj, user):
project=obj.project)
notify_policy, _ = apps.get_model("notifications", "NotifyPolicy").objects.get_or_create(
- project=obj.project, user=user, defaults={"notify_level": NotifyLevel.involved})
+ project=obj.project,
+ user=user,
+ defaults={"notify_level": NotifyLevel.involved,
+ "live_notify_level": NotifyLevel.involved}
+ )
return watched
@@ -438,22 +457,25 @@ def remove_watcher(obj, user):
qs.delete()
-def set_notify_policy_level(notify_policy, notify_level):
+def set_notify_policy_level(notify_policy, notify_level, live=False):
"""
Set notification level for specified policy.
"""
if notify_level not in [e.value for e in NotifyLevel]:
raise exc.IntegrityError(_("Invalid value for notify level"))
- notify_policy.notify_level = notify_level
+ if live:
+ notify_policy.live_notify_level = notify_level
+ else:
+ notify_policy.notify_level = notify_level
notify_policy.save()
-def set_notify_policy_level_to_ignore(notify_policy):
+def set_notify_policy_level_to_ignore(notify_policy, live=False):
"""
Set notification level for specified policy.
"""
- set_notify_policy_level(notify_policy, NotifyLevel.none)
+ set_notify_policy_level(notify_policy, NotifyLevel.none, live=live)
def make_ms_thread_index(msg_id, dt):
diff --git a/taiga/projects/tasks/migrations/0012_add_due_date.py b/taiga/projects/tasks/migrations/0012_add_due_date.py
new file mode 100644
index 00000000..01efa51a
--- /dev/null
+++ b/taiga/projects/tasks/migrations/0012_add_due_date.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2018-04-09 09:06
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('tasks', '0011_auto_20160928_0755'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='task',
+ name='due_date',
+ field=models.DateField(blank=True, default=None, null=True, verbose_name='due date'),
+ ),
+ migrations.AddField(
+ model_name='task',
+ name='due_date_reason',
+ field=models.TextField(blank=True, default='', verbose_name='reason for the due date'),
+ ),
+ ]
diff --git a/taiga/projects/tasks/models.py b/taiga/projects/tasks/models.py
index 5b7b0045..9f823d0c 100644
--- a/taiga/projects/tasks/models.py
+++ b/taiga/projects/tasks/models.py
@@ -24,13 +24,14 @@ from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from taiga.base.utils.time import timestamp_ms
+from taiga.projects.due_dates.models import DueDateMixin
from taiga.projects.occ import OCCModelMixin
from taiga.projects.notifications.mixins import WatchedModelMixin
from taiga.projects.mixins.blocked import BlockedMixin
from taiga.projects.tagging.models import TaggedMixin
-class Task(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, models.Model):
+class Task(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, DueDateMixin, models.Model):
user_story = models.ForeignKey("userstories.UserStory", null=True, blank=True,
related_name="tasks", verbose_name=_("user story"))
ref = models.BigIntegerField(db_index=True, null=True, blank=True, default=None,
diff --git a/taiga/projects/tasks/serializers.py b/taiga/projects/tasks/serializers.py
index 04cab33e..51b0ecbe 100644
--- a/taiga/projects/tasks/serializers.py
+++ b/taiga/projects/tasks/serializers.py
@@ -22,6 +22,7 @@ from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
from taiga.projects.attachments.serializers import BasicAttachmentsInfoSerializerMixin
+from taiga.projects.due_dates.serializers import DueDateSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
@@ -36,7 +37,8 @@ class TaskListSerializer(VoteResourceSerializerMixin, WatchedResourceSerializer,
OwnerExtraInfoSerializerMixin, AssignedToExtraInfoSerializerMixin,
StatusExtraInfoSerializerMixin, ProjectExtraInfoSerializerMixin,
BasicAttachmentsInfoSerializerMixin, TaggedInProjectResourceSerializer,
- TotalCommentsSerializerMixin, serializers.LightSerializer):
+ TotalCommentsSerializerMixin, DueDateSerializerMixin,
+ serializers.LightSerializer):
id = Field()
user_story = Field(attr="user_story_id")
diff --git a/taiga/projects/tasks/services.py b/taiga/projects/tasks/services.py
index c0a6272e..a4ebb748 100644
--- a/taiga/projects/tasks/services.py
+++ b/taiga/projects/tasks/services.py
@@ -121,8 +121,9 @@ def tasks_to_csv(project, queryset):
fieldnames = ["ref", "subject", "description", "user_story", "sprint", "sprint_estimated_start",
"sprint_estimated_finish", "owner", "owner_full_name", "assigned_to",
"assigned_to_full_name", "status", "is_iocaine", "is_closed", "us_order",
- "taskboard_order", "attachments", "external_reference", "tags", "watchers", "voters",
- "created_date", "modified_date", "finished_date"]
+ "taskboard_order", "attachments", "external_reference", "tags", "watchers",
+ "voters", "created_date", "modified_date", "finished_date", "due_date",
+ "due_date_reason"]
custom_attrs = project.taskcustomattributes.all()
for custom_attr in custom_attrs:
@@ -167,6 +168,8 @@ def tasks_to_csv(project, queryset):
"created_date": task.created_date,
"modified_date": task.modified_date,
"finished_date": task.finished_date,
+ "due_date": task.due_date,
+ "due_date_reason": task.due_date_reason,
}
for custom_attr in custom_attrs:
value = task.custom_attributes_values.attributes_values.get(str(custom_attr.id), None)
diff --git a/taiga/projects/userstories/admin.py b/taiga/projects/userstories/admin.py
index f95b433a..818f2abe 100644
--- a/taiga/projects/userstories/admin.py
+++ b/taiga/projects/userstories/admin.py
@@ -67,6 +67,10 @@ class UserStoryAdmin(admin.ModelAdmin):
and getattr(self, 'obj', None)):
kwargs["queryset"] = db_field.related.parent_model.objects.filter(
memberships__project=self.obj.project)
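+        # Limit the assignable users to members of the user story's project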
+ elif (db_field.name in ["assigned_users"]
+ and getattr(self, 'obj', None)):
+ kwargs["queryset"] = db_field.related_model.objects.filter(
+ memberships__project=self.obj.project)
return super().formfield_for_manytomany(db_field, request, **kwargs)
diff --git a/taiga/projects/userstories/api.py b/taiga/projects/userstories/api.py
index dc7cef82..fc12b81b 100644
--- a/taiga/projects/userstories/api.py
+++ b/taiga/projects/userstories/api.py
@@ -127,7 +127,6 @@ class UserStoryViewSet(OCCResourceMixin, VotedResourceMixin, HistoryResourceMixi
include_attachments=include_attachments,
include_tasks=include_tasks,
epic_id=epic_id)
-
return qs
def pre_conditions_on_save(self, obj):
diff --git a/taiga/projects/userstories/migrations/0015_add_due_date.py b/taiga/projects/userstories/migrations/0015_add_due_date.py
new file mode 100644
index 00000000..c4025ff4
--- /dev/null
+++ b/taiga/projects/userstories/migrations/0015_add_due_date.py
@@ -0,0 +1,25 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2018-04-09 09:06
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('userstories', '0014_auto_20160928_0540'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='userstory',
+ name='due_date',
+ field=models.DateField(blank=True, default=None, null=True, verbose_name='due date'),
+ ),
+ migrations.AddField(
+ model_name='userstory',
+ name='due_date_reason',
+ field=models.TextField(blank=True, default='', verbose_name='reason for the due date'),
+ ),
+ ]
diff --git a/taiga/projects/userstories/migrations/0016_userstory_assigned_users.py b/taiga/projects/userstories/migrations/0016_userstory_assigned_users.py
new file mode 100644
index 00000000..e83edb34
--- /dev/null
+++ b/taiga/projects/userstories/migrations/0016_userstory_assigned_users.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.11.2 on 2018-02-13 10:14
+from __future__ import unicode_literals
+
+from django.conf import settings
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ migrations.swappable_dependency(settings.AUTH_USER_MODEL),
+ ('userstories', '0015_add_due_date'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='userstory',
+ name='assigned_users',
+ field=models.ManyToManyField(blank=True, default=None, related_name='assigned_userstories', to=settings.AUTH_USER_MODEL, verbose_name='assigned users'),
+ ),
+ ]
diff --git a/taiga/projects/userstories/models.py b/taiga/projects/userstories/models.py
index a6f3a414..ed9add87 100644
--- a/taiga/projects/userstories/models.py
+++ b/taiga/projects/userstories/models.py
@@ -26,6 +26,7 @@ from django.utils import timezone
from picklefield.fields import PickledObjectField
from taiga.base.utils.time import timestamp_ms
+from taiga.projects.due_dates.models import DueDateMixin
from taiga.projects.tagging.models import TaggedMixin
from taiga.projects.occ import OCCModelMixin
from taiga.projects.notifications.mixins import WatchedModelMixin
@@ -57,7 +58,7 @@ class RolePoints(models.Model):
return self.user_story.project
-class UserStory(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, models.Model):
+class UserStory(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, DueDateMixin, models.Model):
ref = models.BigIntegerField(db_index=True, null=True, blank=True, default=None,
verbose_name=_("ref"))
milestone = models.ForeignKey("milestones.Milestone", null=True, blank=True,
@@ -96,6 +97,9 @@ class UserStory(OCCModelMixin, WatchedModelMixin, BlockedMixin, TaggedMixin, mod
assigned_to = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True,
default=None, related_name="userstories_assigned_to_me",
verbose_name=_("assigned to"))
+ assigned_users = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True,
+ default=None, related_name="assigned_userstories",
+ verbose_name=_("assigned users"))
client_requirement = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("is client requirement"))
team_requirement = models.BooleanField(default=False, null=False, blank=True,
diff --git a/taiga/projects/userstories/serializers.py b/taiga/projects/userstories/serializers.py
index 3dcd2196..2e34fff4 100644
--- a/taiga/projects/userstories/serializers.py
+++ b/taiga/projects/userstories/serializers.py
@@ -22,6 +22,7 @@ from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
from taiga.projects.attachments.serializers import BasicAttachmentsInfoSerializerMixin
+from taiga.projects.due_dates.serializers import DueDateSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
@@ -49,7 +50,7 @@ class UserStoryListSerializer(ProjectExtraInfoSerializerMixin,
OwnerExtraInfoSerializerMixin, AssignedToExtraInfoSerializerMixin,
StatusExtraInfoSerializerMixin, BasicAttachmentsInfoSerializerMixin,
TaggedInProjectResourceSerializer, TotalCommentsSerializerMixin,
- serializers.LightSerializer):
+ DueDateSerializerMixin, serializers.LightSerializer):
id = Field()
ref = Field()
@@ -82,6 +83,24 @@ class UserStoryListSerializer(ProjectExtraInfoSerializerMixin,
epic_order = MethodField()
tasks = MethodField()
+ assigned_users = MethodField()
+
+ def get_assigned_users(self, obj):
+ """Get the assigned of an object.
+
+ :return: User queryset object representing the assigned users
+ """
+        assigned_users = {user.id for user in obj.assigned_users.all()}
+        # Keep backwards compatibility with the single 'assigned_to' field
+        if obj.assigned_to:
+            assigned_users.add(obj.assigned_to.id)
+        return assigned_users
+
def get_epic_order(self, obj):
include_epic_order = getattr(obj, "include_epic_order", False)
diff --git a/taiga/projects/userstories/services.py b/taiga/projects/userstories/services.py
index 7f9dff20..db711d9a 100644
--- a/taiga/projects/userstories/services.py
+++ b/taiga/projects/userstories/services.py
@@ -38,6 +38,7 @@ from taiga.projects.notifications.utils import attach_watchers_to_queryset
from . import models
+
#####################################################
# Bulk actions
#####################################################
@@ -46,7 +47,8 @@ def get_userstories_from_bulk(bulk_data, **additional_fields):
"""Convert `bulk_data` into a list of user stories.
:param bulk_data: List of user stories in bulk format.
- :param additional_fields: Additional fields when instantiating each user story.
+ :param additional_fields: Additional fields when instantiating each user
+ story.
:return: List of `UserStory` instances.
"""
@@ -54,12 +56,14 @@ def get_userstories_from_bulk(bulk_data, **additional_fields):
for line in text.split_in_lines(bulk_data)]
-def create_userstories_in_bulk(bulk_data, callback=None, precall=None, **additional_fields):
+def create_userstories_in_bulk(bulk_data, callback=None, precall=None,
+ **additional_fields):
"""Create user stories from `bulk_data`.
:param bulk_data: List of user stories in bulk format.
:param callback: Callback to execute after each user story save.
- :param additional_fields: Additional fields when instantiating each user story.
+ :param additional_fields: Additional fields when instantiating each user
+ story.
:return: List of created `Task` instances.
"""
@@ -76,11 +80,13 @@ def create_userstories_in_bulk(bulk_data, callback=None, precall=None, **additio
return userstories
-def update_userstories_order_in_bulk(bulk_data: list, field: str, project: object,
- status: object=None, milestone: object=None):
+def update_userstories_order_in_bulk(bulk_data: list, field: str,
+ project: object,
+ status: object = None,
+ milestone: object = None):
"""
- Updates the order of the userstories specified adding the extra updates needed
- to keep consistency.
+ Updates the order of the userstories specified adding the extra updates
+ needed to keep consistency.
`bulk_data` should be a list of dicts with the following format:
`field` is the order field used
@@ -106,8 +112,8 @@ def update_userstories_order_in_bulk(bulk_data: list, field: str, project: objec
def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
"""
- Update the milestone and the milestone order of some user stories adding the
- extra orders needed to keep consistency.
+ Update the milestone and the milestone order of some user stories adding
+ the extra orders needed to keep consistency.
`bulk_data` should be a list of dicts with the following format:
[{'us_id': , 'order': }, ...]
"""
@@ -116,7 +122,8 @@ def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
new_us_orders = {}
for e in bulk_data:
new_us_orders[e["us_id"]] = e["order"]
- # The base orders where we apply the new orders must containg all the values
+            # The base orders where we apply the new orders must contain
+            # all the values
us_orders[e["us_id"]] = e["order"]
apply_order_updates(us_orders, new_us_orders)
@@ -128,11 +135,14 @@ def update_userstories_milestone_in_bulk(bulk_data: list, milestone: object):
content_type="userstories.userstory",
projectid=milestone.project.pk)
- db.update_attr_in_bulk_for_ids(us_milestones, "milestone_id", model=models.UserStory)
+ db.update_attr_in_bulk_for_ids(us_milestones, "milestone_id",
+ model=models.UserStory)
db.update_attr_in_bulk_for_ids(us_orders, "sprint_order", models.UserStory)
# Updating the milestone for the tasks
- Task.objects.filter(user_story_id__in=[e["us_id"] for e in bulk_data]).update(milestone=milestone)
+ Task.objects.filter(
+ user_story_id__in=[e["us_id"] for e in bulk_data]).update(
+ milestone=milestone)
return us_orders
@@ -157,7 +167,8 @@ def calculate_userstory_is_closed(user_story):
if user_story.tasks.count() == 0:
return user_story.status is not None and user_story.status.is_closed
- if all([task.status is not None and task.status.is_closed for task in user_story.tasks.all()]):
+ if all([task.status is not None and task.status.is_closed for task in
+ user_story.tasks.all()]):
return True
return False
@@ -183,9 +194,12 @@ def open_userstory(us):
def userstories_to_csv(project, queryset):
csv_data = io.StringIO()
- fieldnames = ["ref", "subject", "description", "sprint", "sprint_estimated_start",
- "sprint_estimated_finish", "owner", "owner_full_name", "assigned_to",
- "assigned_to_full_name", "status", "is_closed"]
+ fieldnames = ["ref", "subject", "description", "sprint",
+ "sprint_estimated_start",
+ "sprint_estimated_finish", "owner", "owner_full_name",
+ "assigned_to",
+ "assigned_to_full_name", "assigned_users",
+ "assigned_users_full_name", "status", "is_closed"]
roles = project.roles.filter(computable=True).order_by('slug')
for role in roles:
@@ -197,7 +211,7 @@ def userstories_to_csv(project, queryset):
"created_date", "modified_date", "finish_date",
"client_requirement", "team_requirement", "attachments",
"generated_from_issue", "external_reference", "tasks",
- "tags", "watchers", "voters"]
+ "tags", "watchers", "voters", "due_date", "due_date_reason"]
custom_attrs = project.userstorycustomattributes.all()
for custom_attr in custom_attrs:
@@ -227,12 +241,21 @@ def userstories_to_csv(project, queryset):
"subject": us.subject,
"description": us.description,
"sprint": us.milestone.name if us.milestone else None,
- "sprint_estimated_start": us.milestone.estimated_start if us.milestone else None,
- "sprint_estimated_finish": us.milestone.estimated_finish if us.milestone else None,
+ "sprint_estimated_start": us.milestone.estimated_start if
+ us.milestone else None,
+ "sprint_estimated_finish": us.milestone.estimated_finish if
+ us.milestone else None,
"owner": us.owner.username if us.owner else None,
"owner_full_name": us.owner.get_full_name() if us.owner else None,
"assigned_to": us.assigned_to.username if us.assigned_to else None,
- "assigned_to_full_name": us.assigned_to.get_full_name() if us.assigned_to else None,
+ "assigned_to_full_name": us.assigned_to.get_full_name() if
+ us.assigned_to else None,
+ "assigned_users": ",".join(
+ [assigned_user.username for assigned_user in
+ us.assigned_users.all()]),
+ "assigned_users_full_name": ",".join(
+ [assigned_user.get_full_name() for assigned_user in
+ us.assigned_users.all()]),
"status": us.status.name if us.status else None,
"is_closed": us.is_closed,
"backlog_order": us.backlog_order,
@@ -244,22 +267,28 @@ def userstories_to_csv(project, queryset):
"client_requirement": us.client_requirement,
"team_requirement": us.team_requirement,
"attachments": us.attachments.count(),
- "generated_from_issue": us.generated_from_issue.ref if us.generated_from_issue else None,
+ "generated_from_issue": us.generated_from_issue.ref if
+ us.generated_from_issue else None,
"external_reference": us.external_reference,
"tasks": ",".join([str(task.ref) for task in us.tasks.all()]),
"tags": ",".join(us.tags or []),
"watchers": us.watchers,
- "voters": us.total_voters
+ "voters": us.total_voters,
+ "due_date": us.due_date,
+ "due_date_reason": us.due_date_reason,
}
- us_role_points_by_role_id = {us_rp.role.id: us_rp.points.value for us_rp in us.role_points.all()}
+ us_role_points_by_role_id = {us_rp.role.id: us_rp.points.value for
+ us_rp in us.role_points.all()}
for role in roles:
- row["{}-points".format(role.slug)] = us_role_points_by_role_id.get(role.id, 0)
+ row["{}-points".format(role.slug)] = \
+ us_role_points_by_role_id.get(role.id, 0)
row['total-points'] = us.get_total_points()
for custom_attr in custom_attrs:
- value = us.custom_attributes_values.attributes_values.get(str(custom_attr.id), None)
+ value = us.custom_attributes_values.attributes_values.get(
+ str(custom_attr.id), None)
row[custom_attr.name] = value
writer.writerow(row)
diff --git a/taiga/projects/userstories/utils.py b/taiga/projects/userstories/utils.py
index 3a903491..880f7139 100644
--- a/taiga/projects/userstories/utils.py
+++ b/taiga/projects/userstories/utils.py
@@ -177,3 +177,22 @@ def attach_extra_info(queryset, user=None, include_attachments=False, include_ta
queryset = attach_is_watcher_to_queryset(queryset, user)
queryset = attach_total_comments_to_queryset(queryset)
return queryset
+
+
+def attach_assigned_users(queryset, as_field="assigned_users_attr"):
+ """Attach assigned users as json column to each object of the queryset.
+
+ :param queryset: A Django user stories queryset object.
+    :param as_field: Attach the assigned users as an attribute with this name.
+
+ :return: Queryset object with the additional `as_field` field.
+ """
+
+ model = queryset.model
+ sql = """SELECT "userstories_userstory_assigned_users"."user_id" AS "user_id"
+ FROM "userstories_userstory_assigned_users"
+ WHERE "userstories_userstory_assigned_users"."userstory_id" = {tbl}.id"""
+
+ sql = sql.format(tbl=model._meta.db_table)
+ queryset = queryset.extra(select={as_field: sql})
+ return queryset
\ No newline at end of file
diff --git a/taiga/webhooks/serializers.py b/taiga/webhooks/serializers.py
index 04516c06..b64a97bd 100644
--- a/taiga/webhooks/serializers.py
+++ b/taiga/webhooks/serializers.py
@@ -345,6 +345,8 @@ class UserStorySerializer(CustomAttributesValuesWebhookSerializerMixin, serializ
created_date = Field()
modified_date = Field()
finish_date = Field()
+ due_date = Field()
+ due_date_reason = Field()
subject = Field()
client_requirement = Field()
team_requirement = Field()
@@ -359,6 +361,7 @@ class UserStorySerializer(CustomAttributesValuesWebhookSerializerMixin, serializ
permalink = serializers.SerializerMethodField("get_permalink")
owner = UserSerializer()
assigned_to = UserSerializer()
+ assigned_users = MethodField()
points = MethodField()
status = UserStoryStatusSerializer()
milestone = MilestoneSerializer()
@@ -369,6 +372,13 @@ class UserStorySerializer(CustomAttributesValuesWebhookSerializerMixin, serializ
def custom_attributes_queryset(self, project):
return project.userstorycustomattributes.all()
+ def get_assigned_users(self, obj):
+ """Get the assigned of an object.
+
+ :return: User queryset object representing the assigned users
+ """
+ return [user.id for user in obj.assigned_users.all()]
+
def get_watchers(self, obj):
return list(obj.get_watchers().values_list("id", flat=True))
@@ -386,6 +396,8 @@ class TaskSerializer(CustomAttributesValuesWebhookSerializerMixin, serializers.L
created_date = Field()
modified_date = Field()
finished_date = Field()
+ due_date = Field()
+ due_date_reason = Field()
subject = Field()
us_order = Field()
taskboard_order = Field()
@@ -424,6 +436,8 @@ class IssueSerializer(CustomAttributesValuesWebhookSerializerMixin, serializers.
created_date = Field()
modified_date = Field()
finished_date = Field()
+ due_date = Field()
+ due_date_reason = Field()
subject = Field()
external_reference = Field()
watchers = MethodField()
diff --git a/tests/factories.py b/tests/factories.py
index 8e98aaa9..409a818e 100644
--- a/tests/factories.py
+++ b/tests/factories.py
@@ -292,6 +292,17 @@ class UserStoryFactory(Factory):
status = factory.SubFactory("tests.factories.UserStoryStatusFactory")
milestone = factory.SubFactory("tests.factories.MilestoneFactory")
tags = factory.Faker("words")
+ due_date = factory.LazyAttribute(lambda o: date.today() + timedelta(days=7))
+ due_date_reason = factory.Faker("words")
+
+ @factory.post_generation
+ def assigned_users(self, create, users_list, **kwargs):
+ if not create:
+ return
+
+ if users_list:
+ for user in users_list:
+ self.assigned_users.add(user)
class TaskFactory(Factory):
@@ -308,6 +319,8 @@ class TaskFactory(Factory):
milestone = factory.SubFactory("tests.factories.MilestoneFactory")
user_story = factory.SubFactory("tests.factories.UserStoryFactory")
tags = factory.Faker("words")
+ due_date = factory.LazyAttribute(lambda o: date.today() + timedelta(days=7))
+ due_date_reason = factory.Faker("words")
class IssueFactory(Factory):
@@ -326,6 +339,8 @@ class IssueFactory(Factory):
type = factory.SubFactory("tests.factories.IssueTypeFactory")
milestone = factory.SubFactory("tests.factories.MilestoneFactory")
tags = factory.Faker("words")
+ due_date = factory.LazyAttribute(lambda o: date.today() + timedelta(days=7))
+ due_date_reason = factory.Faker("words")
class WikiPageFactory(Factory):
diff --git a/tests/integration/test_issues.py b/tests/integration/test_issues.py
index 8148729f..5495dd81 100644
--- a/tests/integration/test_issues.py
+++ b/tests/integration/test_issues.py
@@ -591,9 +591,9 @@ def test_custom_fields_csv_generation():
data.seek(0)
reader = csv.reader(data)
row = next(reader)
- assert row[23] == attr.name
+ assert row[25] == attr.name
row = next(reader)
- assert row[23] == "val1"
+ assert row[25] == "val1"
def test_api_validator_assigned_to_when_update_issues(client):
diff --git a/tests/integration/test_tasks.py b/tests/integration/test_tasks.py
index 12252bf7..c687ab7a 100644
--- a/tests/integration/test_tasks.py
+++ b/tests/integration/test_tasks.py
@@ -574,9 +574,9 @@ def test_custom_fields_csv_generation():
data.seek(0)
reader = csv.reader(data)
row = next(reader)
- assert row[24] == attr.name
+ assert row[26] == attr.name
row = next(reader)
- assert row[24] == "val1"
+ assert row[26] == "val1"
def test_get_tasks_including_attachments(client):
diff --git a/tests/integration/test_timeline.py b/tests/integration/test_timeline.py
index 14d84a35..971ced4d 100644
--- a/tests/integration/test_timeline.py
+++ b/tests/integration/test_timeline.py
@@ -17,6 +17,9 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import pytz
+
+from datetime import datetime, timedelta
import pytest
from .. import factories
@@ -445,10 +448,12 @@ def test_delete_membership_timeline():
def test_comment_user_story_timeline():
user_story = factories.UserStoryFactory.create(subject="test us timeline")
history_services.take_snapshot(user_story, user=user_story.owner)
- history_services.take_snapshot(user_story, user=user_story.owner, comment="testing comment")
+ history_services.take_snapshot(user_story, user=user_story.owner,
+ comment="testing comment")
project_timeline = service.get_project_timeline(user_story.project)
assert project_timeline[0].event_type == "userstories.userstory.change"
- assert project_timeline[0].data["userstory"]["subject"] == "test us timeline"
+ assert project_timeline[0].data["userstory"]["subject"] \
+ == "test us timeline"
assert project_timeline[0].data["comment"] == "testing comment"
@@ -462,13 +467,59 @@ def test_owner_user_story_timeline():
def test_assigned_to_user_story_timeline():
membership = factories.MembershipFactory.create()
- user_story = factories.UserStoryFactory.create(subject="test us timeline", assigned_to=membership.user, project=membership.project)
+ user_story = factories.UserStoryFactory.create(subject="test us timeline",
+ assigned_to=membership.user,
+ project=membership.project)
history_services.take_snapshot(user_story, user=user_story.owner)
user_timeline = service.get_profile_timeline(user_story.assigned_to)
assert user_timeline[0].event_type == "userstories.userstory.create"
assert user_timeline[0].data["userstory"]["subject"] == "test us timeline"
+def test_due_date_user_story_timeline():
+ initial_due_date = datetime.now(pytz.utc) + timedelta(days=1)
+ membership = factories.MembershipFactory.create()
+ user_story = factories.UserStoryFactory.create(subject="test us timeline",
+ due_date=initial_due_date,
+ project=membership.project)
+ history_services.take_snapshot(user_story, user=user_story.owner)
+
+ new_due_date = datetime.now(pytz.utc) + timedelta(days=3)
+ user_story.due_date = new_due_date
+ user_story.save()
+
+ history_services.take_snapshot(user_story, user=user_story.owner)
+ user_timeline = service.get_profile_timeline(user_story.owner)
+
+ assert user_timeline[0].event_type == "userstories.userstory.change"
+ assert user_timeline[0].data["values_diff"]['due_date'] == [str(initial_due_date.date()),
+ str(new_due_date.date())]
+
+
+def test_assigned_users_user_story_timeline():
+ membership = factories.MembershipFactory.create()
+ user_story = factories.UserStoryFactory.create(subject="test us timeline",
+ project=membership.project)
+ history_services.take_snapshot(user_story, user=user_story.owner)
+ user_timeline = service.get_profile_timeline(user_story.owner)
+
+ assert user_timeline[0].event_type == "userstories.userstory.create"
+ assert user_timeline[0].data["userstory"]["subject"] == "test us timeline"
+
+ user_story.assigned_to = membership.user
+ user_story.assigned_users = (membership.user,)
+ user_story.save()
+
+ history_services.take_snapshot(user_story, user=user_story.owner)
+
+ user_timeline = service.get_profile_timeline(user_story.owner)
+
+ assert user_timeline[0].event_type == "userstories.userstory.change"
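+    # 'assigned_to' is deprecated for user stories, so it must not appear in the diff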
+ assert "assigned_to" not in user_timeline[0].data["values_diff"].keys()
+ assert user_timeline[0].data["values_diff"]['assigned_users'] == \
+ [None, membership.user.username]
+
+
def test_user_data_for_non_system_users():
user_story = factories.UserStoryFactory.create(subject="test us timeline")
history_services.take_snapshot(user_story, user=user_story.owner)
@@ -497,9 +548,11 @@ def test_user_data_for_unactived_users():
serialized_obj.data["data"]["user"]["is_profile_visible"] = False
serialized_obj.data["data"]["user"]["username"] = "deleted-user"
+
def test_timeline_error_use_member_ids_instead_of_memberships_ids():
- user_story = factories.UserStoryFactory.create(subject="test error use member ids instead of "
- "memberships ids")
+ user_story = factories.UserStoryFactory.create(
+ subject="test error use member ids instead of "
+ "memberships ids")
member_user = user_story.owner
external_user = factories.UserFactory.create()
diff --git a/tests/integration/test_userstories.py b/tests/integration/test_userstories.py
index 35c7acdf..82979b89 100644
--- a/tests/integration/test_userstories.py
+++ b/tests/integration/test_userstories.py
@@ -69,6 +69,44 @@ def test_update_userstories_order_in_bulk():
models.UserStory)
+def test_create_userstory_with_assign_to(client):
+ user = f.UserFactory.create()
+ user_watcher = f.UserFactory.create()
+ project = f.ProjectFactory.create(owner=user)
+ f.MembershipFactory.create(project=project, user=user, is_admin=True)
+ f.MembershipFactory.create(project=project, user=user_watcher,
+ is_admin=True)
+ url = reverse("userstories-list")
+
+ data = {"subject": "Test user story", "project": project.id,
+ "assigned_to": user.id}
+ client.login(user)
+ response = client.json.post(url, json.dumps(data))
+
+ assert response.status_code == 201
+ assert response.data["assigned_to"] == user.id
+
+
+def test_create_userstory_with_assigned_users(client):
+ user = f.UserFactory.create()
+ user_watcher = f.UserFactory.create()
+ project = f.ProjectFactory.create(owner=user)
+ f.MembershipFactory.create(project=project, user=user, is_admin=True)
+ f.MembershipFactory.create(project=project, user=user_watcher,
+ is_admin=True)
+ url = reverse("userstories-list")
+
+ data = {"subject": "Test user story", "project": project.id,
+ "assigned_users": [user.id, user_watcher.id]}
+ client.login(user)
+ json_data = json.dumps(data)
+
+ response = client.json.post(url, json_data)
+
+ assert response.status_code == 201
+ assert response.data["assigned_users"] == set([user.id, user_watcher.id])
+
+
def test_create_userstory_with_watchers(client):
user = f.UserFactory.create()
user_watcher = f.UserFactory.create()
@@ -883,13 +921,16 @@ def test_get_valid_csv(client):
url = reverse("userstories-csv")
project = f.ProjectFactory.create(userstories_csv_uuid=uuid.uuid4().hex)
- response = client.get("{}?uuid={}".format(url, project.userstories_csv_uuid))
+ response = client.get(
+ "{}?uuid={}".format(url, project.userstories_csv_uuid))
assert response.status_code == 200
def test_custom_fields_csv_generation():
project = f.ProjectFactory.create(userstories_csv_uuid=uuid.uuid4().hex)
- attr = f.UserStoryCustomAttributeFactory.create(project=project, name="attr1", description="desc")
+ attr = f.UserStoryCustomAttributeFactory.create(project=project,
+ name="attr1",
+ description="desc")
us = f.UserStoryFactory.create(project=project)
attr_values = us.custom_attributes_values
attr_values.attributes_values = {str(attr.id): "val1"}
@@ -899,17 +940,20 @@ def test_custom_fields_csv_generation():
data.seek(0)
reader = csv.reader(data)
row = next(reader)
- assert row[28] == attr.name
+
+ assert row.pop() == attr.name
row = next(reader)
- assert row[28] == "val1"
+ assert row.pop() == "val1"
def test_update_userstory_respecting_watchers(client):
watching_user = f.create_user()
project = f.ProjectFactory.create()
- us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project)
+ us = f.UserStoryFactory.create(project=project, status__project=project,
+ milestone__project=project)
us.add_watcher(watching_user)
- f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True)
+ f.MembershipFactory.create(project=us.project, user=us.owner,
+ is_admin=True)
f.MembershipFactory.create(project=us.project, user=watching_user)
client.login(user=us.owner)
@@ -925,8 +969,10 @@ def test_update_userstory_respecting_watchers(client):
def test_update_userstory_update_watchers(client):
watching_user = f.create_user()
project = f.ProjectFactory.create()
- us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project)
- f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True)
+ us = f.UserStoryFactory.create(project=project, status__project=project,
+ milestone__project=project)
+ f.MembershipFactory.create(project=us.project, user=us.owner,
+ is_admin=True)
f.MembershipFactory.create(project=us.project, user=watching_user)
client.login(user=us.owner)
@@ -943,9 +989,11 @@ def test_update_userstory_update_watchers(client):
def test_update_userstory_remove_watchers(client):
watching_user = f.create_user()
project = f.ProjectFactory.create()
- us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project)
+ us = f.UserStoryFactory.create(project=project, status__project=project,
+ milestone__project=project)
us.add_watcher(watching_user)
- f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True)
+ f.MembershipFactory.create(project=us.project, user=us.owner,
+ is_admin=True)
f.MembershipFactory.create(project=us.project, user=watching_user)
client.login(user=us.owner)
@@ -961,8 +1009,10 @@ def test_update_userstory_remove_watchers(client):
def test_update_userstory_update_tribe_gig(client):
project = f.ProjectFactory.create()
- us = f.UserStoryFactory.create(project=project, status__project=project, milestone__project=project)
- f.MembershipFactory.create(project=us.project, user=us.owner, is_admin=True)
+ us = f.UserStoryFactory.create(project=project, status__project=project,
+ milestone__project=project)
+ f.MembershipFactory.create(project=us.project, user=us.owner,
+ is_admin=True)
url = reverse("userstories-detail", kwargs={"pk": us.pk})
data = {
diff --git a/tests/integration/test_webhooks_userstories.py b/tests/integration/test_webhooks_userstories.py
index 0cbfef61..67302b49 100644
--- a/tests/integration/test_webhooks_userstories.py
+++ b/tests/integration/test_webhooks_userstories.py
@@ -82,6 +82,38 @@ def test_webhooks_when_update_user_story(settings):
assert data["change"]["diff"]["subject"]["from"] != data["data"]["subject"]
+def test_webhooks_when_update_assigned_users_user_story(settings):
+ settings.WEBHOOKS_ENABLED = True
+ project = f.ProjectFactory()
+ f.WebhookFactory.create(project=project)
+ f.WebhookFactory.create(project=project)
+
+ obj = f.UserStoryFactory.create(project=project)
+
+ with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
+ services.take_snapshot(obj, user=obj.owner)
+ assert send_request_mock.call_count == 2
+
+ user = f.create_user()
+ obj.assigned_users.add(user)
+ obj.save()
+
+ with patch('taiga.webhooks.tasks._send_request') as send_request_mock:
+ services.take_snapshot(obj, user=obj.owner,)
+ assert send_request_mock.call_count == 2
+
+ (webhook_id, url, key, data) = send_request_mock.call_args[0]
+
+ assert data["action"] == "change"
+ assert data["type"] == "userstory"
+ assert data["by"]["id"] == obj.owner.id
+ assert len(data["data"]["assigned_users"]) == \
+ obj.assigned_users.count()
+ assert data["data"]["assigned_users"] == [user.id]
+ assert not data["change"]["diff"]["assigned_users"]["from"]
+ assert data["change"]["diff"]["assigned_users"]["to"] == user.username
+
+
def test_webhooks_when_delete_user_story(settings):
settings.WEBHOOKS_ENABLED = True
project = f.ProjectFactory()
diff --git a/tests/unit/test_due_date_serializers.py b/tests/unit/test_due_date_serializers.py
new file mode 100644
index 00000000..075d0b21
--- /dev/null
+++ b/tests/unit/test_due_date_serializers.py
@@ -0,0 +1,22 @@
+import datetime as dt
+from unittest import mock
+
+import pytest
+
+from django.utils import timezone
+
+from taiga.projects.due_dates.serializers import DueDateSerializerMixin
+
+@pytest.mark.parametrize('due_date, is_closed, expected', [
+ (None, False, 'not_set'),
+ (dt.date(2100, 1, 1), True, 'no_longer_applicable'),
+ (dt.date(2100, 12, 31), False, 'set'),
+ (dt.date(2000, 1, 1), False, 'past_due'),
+ (timezone.now().date(), False, 'due_soon'),
+])
+def test_due_date_status(due_date, is_closed, expected):
+ serializer = DueDateSerializerMixin()
+ obj_status = mock.MagicMock(is_closed=is_closed)
+ obj = mock.MagicMock(due_date=due_date, status=obj_status)
+ status = serializer.get_due_date_status(obj)
+ assert status == expected