Merge branch 'master' into stable
commit 9e9c302ada

@@ -20,10 +20,7 @@ answer newbie questions, and generally made taiga that much better:
- Andrea Stagi <stagi.andrea@gmail.com>
- Andrés Moya <andres.moya@kaleidos.net>
- Andrey Alekseenko <al42and@gmail.com>
<<<<<<< HEAD
=======
- Brett Profitt <brett.profitt@gmail.com>
>>>>>>> master
- Bruno Clermont <bruno@robotinfra.com>
- Chris Wilson <chris.wilson@aridhia.com>
- David Burke <david@burkesoftware.com>
@@ -32,7 +29,10 @@ answer newbie questions, and generally made taiga that much better:
- Joe Letts
- Julien Palard
- luyikei <luyikei.qmltu@gmail.com>
- Michael Jurke <m.jurke@gmx.de>
- Motius GmbH <mail@motius.de>
- Riccardo Coccioli <riccardo.coccioli@immobiliare.it>
- Ricky Posner <e@eposner.com>
- Stefan Auditor <stefan.auditor@erdfisch.de>
- Yamila Moreno <yamila.moreno@kaleidos.net>
- Yaser Alraddadi <yaser@yr.sa>

CHANGELOG.md
@@ -1,9 +1,51 @@
# Changelog #


## 3.0.0 Stellaria Borealis (2016-10-02)

### Features
- Add Epics.
- Include created, modified and finished dates for tasks in CSV reports.
- Add gravatar url to Users API endpoint.
- ProjectTemplates are now sorted by the attribute 'order'.
- Create empty wiki pages (if they don't exist) when a new link is created.
- Diff messages in history entries now show only the relevant changes (with some context).
- User stories and tasks listing API calls support extra params to include more data (tasks and attachments, and attachments, respectively).
- Comments:
    - Now comment owners and project admins can edit existing comments with the history entry endpoint.
    - Add a new permission to allow adding comments, instead of using the existing modify permission for this purpose.
- Tags:
    - New API endpoints over projects to create, rename, edit, delete and mix tags.
    - Tag color assignment is not automatic.
    - Select a color (or none) for a tag when adding it to stories, issues and tasks.
- Improve the search system over stories, tasks and issues:
    - Search into tags too. (thanks to [Riccardo Coccioli](https://github.com/volans-))
    - Weights are applied: (subject = ref > tags > description).
- Import/Export:
    - Gzip export/import support.
    - Export performance improvements.
- Add filter by email domain for registration and invitation by setting.
- Third party integrations:
    - Included Gogs as a builtin integration.
    - Improve messages generated on webhooks input.
    - Add mentions support in commit messages.
    - Cleanup hooks code.
    - Rework the webhook signature header to align with larger implementations and defined [standards](https://superfeedr-misc.s3.amazonaws.com/pubsubhubbub-core-0.4.html#authednotify). (thanks to [Stefan Auditor](https://github.com/sanduhrs))
- Add created-, modified-, finished- and finish_date queryset filters:
    - Support exact match, gt, gte, lt and lte.
    - Added to issues, tasks and user stories accordingly.
- i18n:
    - Add Norwegian Bokmal (nb) translation.

### Misc
- [API] Improve performance of some calls over lists.
- Lots of small and not so small bugfixes.


## 2.1.0 Ursus Americanus (2016-05-03)

### Features
- Add sprint name and slug on search results for user stories ((thanks to [@everblut](https://github.com/everblut)))
- Add sprint name and slug on search results for user stories (thanks to [@everblut](https://github.com/everblut))
- [API] projects resource: Random order if `discover_mode=true` and `is_featured=true`.
- Webhooks: Improve webhook data:
    - add permalinks
@@ -10,7 +10,7 @@ six==1.10.0
amqp==1.4.9
djmail==0.12.0.post1
django-pgjson==0.3.1
djorm-pgarray==1.2
djorm-pgarray==1.2 # Use until Taiga 2.1. Keep compatibility with old migrations
django-jinja==2.1.2
jinja2==2.8
pygments==2.0.2

@@ -28,9 +28,10 @@ raven==5.10.2
bleach==1.4.3
django-ipware==1.1.3
premailer==2.9.7
cssutils==1.0.1 # Compatible with python 3.5
cssutils==1.0.1 # Compatible with python 3.5
lxml==3.5.0
git+https://github.com/Xof/django-pglocks.git@dbb8d7375066859f897604132bd437832d2014ea
pyjwkest==1.1.5
python-dateutil==2.4.2
netaddr==0.7.18
serpy==0.1.1
@@ -0,0 +1,6 @@
#!/bin/bash

python ./manage.py dumpdata --format json \
    --indent 4 \
    --output './taiga/projects/fixtures/initial_project_templates.json' \
    'projects.ProjectTemplate'
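For reference, a minimal sketch of regenerating the same fixture through Django's management API instead of the shell script above (a configured DJANGO_SETTINGS_MODULE is assumed; the output path is taken from the script):

    from django.core.management import call_command

    # Equivalent of the dumpdata invocation in the script above.
    call_command(
        "dumpdata", "projects.ProjectTemplate",
        format="json", indent=4,
        output="./taiga/projects/fixtures/initial_project_templates.json",
    )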
@@ -124,7 +124,7 @@ LANGUAGES = [
    #("mn", "Монгол"),  # Mongolian
    #("mr", "मराठी"),  # Marathi
    #("my", "မြန်မာ"),  # Burmese
    #("nb", "Norsk (bokmål)"),  # Norwegian Bokmal
    ("nb", "Norsk (bokmål)"),  # Norwegian Bokmal
    #("ne", "नेपाली"),  # Nepali
    ("nl", "Nederlands"),  # Dutch
    #("nn", "Norsk (nynorsk)"),  # Norwegian Nynorsk
@@ -300,6 +300,7 @@ INSTALLED_APPS = [
    "taiga.projects.likes",
    "taiga.projects.votes",
    "taiga.projects.milestones",
    "taiga.projects.epics",
    "taiga.projects.userstories",
    "taiga.projects.tasks",
    "taiga.projects.issues",
@@ -313,6 +314,7 @@ INSTALLED_APPS = [
    "taiga.hooks.github",
    "taiga.hooks.gitlab",
    "taiga.hooks.bitbucket",
    "taiga.hooks.gogs",
    "taiga.webhooks",

    "djmail",
@@ -436,11 +438,14 @@ APP_EXTRA_EXPOSE_HEADERS = [
    "taiga-info-total-opened-milestones",
    "taiga-info-total-closed-milestones",
    "taiga-info-project-memberships",
    "taiga-info-project-is-private"
    "taiga-info-project-is-private",
    "taiga-info-order-updated"
]

DEFAULT_PROJECT_TEMPLATE = "scrum"
PUBLIC_REGISTER_ENABLED = False
# None or [] values in USER_EMAIL_ALLOWED_DOMAINS means allow any domain
USER_EMAIL_ALLOWED_DOMAINS = None

SEARCHES_MAX_RESULTS = 150
@@ -477,10 +482,6 @@ THUMBNAIL_ALIASES = {
    },
}

# GRAVATAR_DEFAULT_AVATAR = "img/user-noimage.png"
GRAVATAR_DEFAULT_AVATAR = ""
GRAVATAR_AVATAR_SIZE = THN_AVATAR_SIZE

TAGS_PREDEFINED_COLORS = ["#fce94f", "#edd400", "#c4a000", "#8ae234",
                          "#73d216", "#4e9a06", "#d3d7cf", "#fcaf3e",
                          "#f57900", "#ce5c00", "#729fcf", "#3465a4",
@@ -508,6 +509,7 @@ PROJECT_MODULES_CONFIGURATORS = {
    "github": "taiga.hooks.github.services.get_or_generate_config",
    "gitlab": "taiga.hooks.gitlab.services.get_or_generate_config",
    "bitbucket": "taiga.hooks.bitbucket.services.get_or_generate_config",
    "gogs": "taiga.hooks.gogs.services.get_or_generate_config",
}

BITBUCKET_VALID_ORIGIN_IPS = ["131.103.20.165", "131.103.20.166", "104.192.143.192/28", "104.192.143.208/28"]
@ -18,6 +18,10 @@
|
|||
|
||||
from .development import *
|
||||
|
||||
#########################################
|
||||
## GENERIC
|
||||
#########################################
|
||||
|
||||
#DEBUG = False
|
||||
|
||||
#ADMINS = (
|
||||
|
@ -54,6 +58,25 @@ DATABASES = {
|
|||
#STATIC_ROOT = '/home/taiga/static'
|
||||
|
||||
|
||||
#########################################
|
||||
## THROTTLING
|
||||
#########################################
|
||||
|
||||
#REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"] = {
|
||||
# "anon": "20/min",
|
||||
# "user": "200/min",
|
||||
# "import-mode": "20/sec",
|
||||
# "import-dump-mode": "1/minute"
|
||||
#}
|
||||
|
||||
|
||||
#########################################
|
||||
## MAIL SYSTEM SETTINGS
|
||||
#########################################
|
||||
|
||||
#DEFAULT_FROM_EMAIL = "john@doe.com"
|
||||
#CHANGE_NOTIFICATIONS_MIN_INTERVAL = 300 #seconds
|
||||
|
||||
# EMAIL SETTINGS EXAMPLE
|
||||
#EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
|
||||
#EMAIL_USE_TLS = False
|
||||
|
@ -61,7 +84,6 @@ DATABASES = {
|
|||
#EMAIL_PORT = 25
|
||||
#EMAIL_HOST_USER = 'user'
|
||||
#EMAIL_HOST_PASSWORD = 'password'
|
||||
#DEFAULT_FROM_EMAIL = "john@doe.com"
|
||||
|
||||
# GMAIL SETTINGS EXAMPLE
|
||||
#EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
|
||||
|
@ -71,13 +93,22 @@ DATABASES = {
|
|||
#EMAIL_HOST_USER = 'youremail@gmail.com'
|
||||
#EMAIL_HOST_PASSWORD = 'yourpassword'
|
||||
|
||||
# THROTTLING
|
||||
#REST_FRAMEWORK["DEFAULT_THROTTLE_RATES"] = {
|
||||
# "anon": "20/min",
|
||||
# "user": "200/min",
|
||||
# "import-mode": "20/sec",
|
||||
# "import-dump-mode": "1/minute"
|
||||
#}
|
||||
|
||||
#########################################
|
||||
## REGISTRATION
|
||||
#########################################
|
||||
|
||||
#PUBLIC_REGISTER_ENABLED = True
|
||||
|
||||
# LIMIT ALLOWED DOMAINS FOR REGISTER AND INVITE
|
||||
# None or [] values in USER_EMAIL_ALLOWED_DOMAINS means allow any domain
|
||||
#USER_EMAIL_ALLOWED_DOMAINS = None
|
||||
|
||||
# PUBLIC OR PRIVATE NUMBER OF PROJECTS PER USER
|
||||
#MAX_PRIVATE_PROJECTS_PER_USER = None # None == no limit
|
||||
#MAX_PUBLIC_PROJECTS_PER_USER = None # None == no limit
|
||||
#MAX_MEMBERSHIPS_PRIVATE_PROJECTS = None # None == no limit
|
||||
#MAX_MEMBERSHIPS_PUBLIC_PROJECTS = None # None == no limit
|
||||
|
||||
# GITHUB SETTINGS
|
||||
#GITHUB_URL = "https://github.com/"
|
||||
|
@ -85,20 +116,37 @@ DATABASES = {
|
|||
#GITHUB_API_CLIENT_ID = "yourgithubclientid"
|
||||
#GITHUB_API_CLIENT_SECRET = "yourgithubclientsecret"
|
||||
|
||||
# FEEDBACK MODULE (See config in taiga-front too)
|
||||
#FEEDBACK_ENABLED = True
|
||||
#FEEDBACK_EMAIL = "support@taiga.io"
|
||||
|
||||
# STATS MODULE
|
||||
#STATS_ENABLED = False
|
||||
#FRONT_SITEMAP_CACHE_TIMEOUT = 60*60 # In second
|
||||
#########################################
|
||||
## SITEMAP
|
||||
#########################################
|
||||
|
||||
# SITEMAP
|
||||
# If True, /front/sitemap.xml serves a valid sitemap of the taiga-front client
|
||||
#FRONT_SITEMAP_ENABLED = False
|
||||
#FRONT_SITEMAP_CACHE_TIMEOUT = 24*60*60 # In seconds
|
||||
|
||||
# CELERY
|
||||
|
||||
#########################################
|
||||
## FEEDBACK
|
||||
#########################################
|
||||
|
||||
# Note: See config in taiga-front too
|
||||
#FEEDBACK_ENABLED = True
|
||||
#FEEDBACK_EMAIL = "support@taiga.io"
|
||||
|
||||
|
||||
#########################################
|
||||
## STATS
|
||||
#########################################
|
||||
|
||||
#STATS_ENABLED = False
|
||||
#FRONT_SITEMAP_CACHE_TIMEOUT = 60*60 # In seconds
|
||||
|
||||
|
||||
#########################################
|
||||
## CELERY
|
||||
#########################################
|
||||
|
||||
#from .celery import *
|
||||
#CELERY_ENABLED = True
|
||||
#
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
[flake8]
|
||||
ignore = E41,E266
|
||||
max-line-length = 120
|
||||
exclude =
|
||||
.git,
|
||||
*__pycache__*,
|
||||
*tests*,
|
||||
*scripts*,
|
||||
*migrations*,
|
||||
*management*
|
||||
max-complexity = 10
|
|
@ -22,15 +22,16 @@ from enum import Enum
|
|||
from django.utils.translation import ugettext as _
|
||||
from django.conf import settings
|
||||
|
||||
from taiga.base.api import validators
|
||||
from taiga.base.api import serializers
|
||||
from taiga.base.api import viewsets
|
||||
from taiga.base.decorators import list_route
|
||||
from taiga.base import exceptions as exc
|
||||
from taiga.base import response
|
||||
|
||||
from .serializers import PublicRegisterSerializer
|
||||
from .serializers import PrivateRegisterForExistingUserSerializer
|
||||
from .serializers import PrivateRegisterForNewUserSerializer
|
||||
from .validators import PublicRegisterValidator
|
||||
from .validators import PrivateRegisterForExistingUserValidator
|
||||
from .validators import PrivateRegisterForNewUserValidator
|
||||
|
||||
from .services import private_register_for_existing_user
|
||||
from .services import private_register_for_new_user
|
||||
|
@ -44,7 +45,7 @@ from .permissions import AuthPermission
|
|||
def _parse_data(data:dict, *, cls):
|
||||
"""
|
||||
Generic function for parse user data using
|
||||
specified serializer on `cls` keyword parameter.
|
||||
specified validator on `cls` keyword parameter.
|
||||
|
||||
Raises: RequestValidationError exception if
|
||||
some errors found when data is validated.
|
||||
|
@ -52,21 +53,21 @@ def _parse_data(data:dict, *, cls):
|
|||
Returns the parsed data.
|
||||
"""
|
||||
|
||||
serializer = cls(data=data)
|
||||
if not serializer.is_valid():
|
||||
raise exc.RequestValidationError(serializer.errors)
|
||||
return serializer.data
|
||||
validator = cls(data=data)
|
||||
if not validator.is_valid():
|
||||
raise exc.RequestValidationError(validator.errors)
|
||||
return validator.data
|
||||
|
||||
# Parse public register data
|
||||
parse_public_register_data = partial(_parse_data, cls=PublicRegisterSerializer)
|
||||
parse_public_register_data = partial(_parse_data, cls=PublicRegisterValidator)
|
||||
|
||||
# Parse private register data for existing user
|
||||
parse_private_register_for_existing_user_data = \
|
||||
partial(_parse_data, cls=PrivateRegisterForExistingUserSerializer)
|
||||
partial(_parse_data, cls=PrivateRegisterForExistingUserValidator)
|
||||
|
||||
# Parse private register data for new user
|
||||
parse_private_register_for_new_user_data = \
|
||||
partial(_parse_data, cls=PrivateRegisterForNewUserSerializer)
|
||||
partial(_parse_data, cls=PrivateRegisterForNewUserValidator)
|
||||
|
||||
|
||||
class RegisterTypeEnum(Enum):
|
||||
|
@ -81,10 +82,10 @@ def parse_register_type(userdata:dict) -> str:
|
|||
"""
|
||||
# Create an ad hoc inner serializer to avoid parsing
# the user data manually.
|
||||
class _serializer(serializers.Serializer):
|
||||
class _validator(validators.Validator):
|
||||
existing = serializers.BooleanField()
|
||||
|
||||
instance = _serializer(data=userdata)
|
||||
instance = _validator(data=userdata)
|
||||
if not instance.is_valid():
|
||||
raise exc.RequestValidationError(instance.errors)
|
||||
|
||||
|
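For context, a minimal usage sketch of the partial-built parsers above (the input values are hypothetical; the accepted field set is assumed from the register validators later in this diff):

    # Hypothetical registration payload; parse_public_register_data raises
    # exc.RequestValidationError if the validator rejects it.
    data = {"username": "jane", "full_name": "Jane Doe",
            "email": "jane@example.com", "password": "s3cr3t"}
    clean = parse_public_register_data(data)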
|
|
@ -16,16 +16,17 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.core import validators
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core import validators as core_validators
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.base.api import serializers
|
||||
from taiga.base.api import validators
|
||||
from taiga.base.exceptions import ValidationError
|
||||
|
||||
import re
|
||||
|
||||
|
||||
class BaseRegisterSerializer(serializers.Serializer):
|
||||
class BaseRegisterValidator(validators.Validator):
|
||||
full_name = serializers.CharField(max_length=256)
|
||||
email = serializers.EmailField(max_length=255)
|
||||
username = serializers.CharField(max_length=255)
|
||||
|
@ -33,25 +34,25 @@ class BaseRegisterSerializer(serializers.Serializer):
|
|||
|
||||
def validate_username(self, attrs, source):
|
||||
value = attrs[source]
|
||||
validator = validators.RegexValidator(re.compile('^[\w.-]+$'), _("invalid username"), "invalid")
|
||||
validator = core_validators.RegexValidator(re.compile('^[\w.-]+$'), _("invalid username"), "invalid")
|
||||
|
||||
try:
|
||||
validator(value)
|
||||
except ValidationError:
|
||||
raise serializers.ValidationError(_("Required. 255 characters or fewer. Letters, numbers "
|
||||
"and /./-/_ characters'"))
|
||||
raise ValidationError(_("Required. 255 characters or fewer. Letters, numbers "
|
||||
"and /./-/_ characters'"))
|
||||
return attrs
|
||||
|
||||
|
||||
class PublicRegisterSerializer(BaseRegisterSerializer):
|
||||
class PublicRegisterValidator(BaseRegisterValidator):
|
||||
pass
|
||||
|
||||
|
||||
class PrivateRegisterForNewUserSerializer(BaseRegisterSerializer):
|
||||
class PrivateRegisterForNewUserValidator(BaseRegisterValidator):
|
||||
token = serializers.CharField(max_length=255, required=True)
|
||||
|
||||
|
||||
class PrivateRegisterForExistingUserSerializer(serializers.Serializer):
|
||||
class PrivateRegisterForExistingUserValidator(validators.Validator):
|
||||
username = serializers.CharField(max_length=255)
|
||||
password = serializers.CharField(min_length=4)
|
||||
token = serializers.CharField(max_length=255, required=True)
|
|
@ -50,7 +50,6 @@ They are very similar to Django's form fields.
|
|||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.core import validators
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.db.models.fields import BLANK_CHOICE_DASH
|
||||
from django.forms import widgets
|
||||
from django.http import QueryDict
|
||||
|
@ -66,6 +65,8 @@ from django.utils.functional import Promise
|
|||
from django.utils.translation import ugettext
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from taiga.base.exceptions import ValidationError
|
||||
|
||||
from . import ISO_8601
|
||||
from .settings import api_settings
|
||||
|
||||
|
@ -611,6 +612,15 @@ class ChoiceField(WritableField):
|
|||
return value
|
||||
|
||||
|
||||
def validate_user_email_allowed_domains(value):
|
||||
validators.validate_email(value)
|
||||
|
||||
domain_name = value.split("@")[1]
|
||||
|
||||
if settings.USER_EMAIL_ALLOWED_DOMAINS and domain_name not in settings.USER_EMAIL_ALLOWED_DOMAINS:
|
||||
raise ValidationError(_("Your email domain is not allowed"))
|
||||
|
||||
|
||||
class EmailField(CharField):
|
||||
type_name = "EmailField"
|
||||
type_label = "email"
|
||||
|
@ -619,7 +629,7 @@ class EmailField(CharField):
|
|||
default_error_messages = {
|
||||
"invalid": _("Enter a valid email address."),
|
||||
}
|
||||
default_validators = [validators.validate_email]
|
||||
default_validators = [validate_user_email_allowed_domains]
|
||||
|
||||
def from_native(self, value):
|
||||
ret = super(EmailField, self).from_native(value)
|
||||
|
|
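A short configuration sketch for the setting this validator consults (example values are hypothetical); as the common.py hunk above notes, None or [] allows any domain:

    # settings: restrict registration and invitations to these email domains.
    USER_EMAIL_ALLOWED_DOMAINS = ["kaleidos.net", "example.com"]
    # With this value, "user@othercorp.com" is rejected by
    # validate_user_email_allowed_domains with "Your email domain is not allowed".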
|
@ -62,6 +62,7 @@ class GenericAPIView(pagination.PaginationMixin,
|
|||
# or override `get_queryset()`/`get_serializer_class()`.
|
||||
queryset = None
|
||||
serializer_class = None
|
||||
validator_class = None
|
||||
|
||||
# This shortcut may be used instead of setting either or both
|
||||
# of the `queryset`/`serializer_class` attributes, although using
|
||||
|
@ -79,6 +80,7 @@ class GenericAPIView(pagination.PaginationMixin,
|
|||
# The following attributes may be subject to change,
|
||||
# and should be considered private API.
|
||||
model_serializer_class = api_settings.DEFAULT_MODEL_SERIALIZER_CLASS
|
||||
model_validator_class = api_settings.DEFAULT_MODEL_VALIDATOR_CLASS
|
||||
|
||||
######################################
|
||||
# These are pending deprecation...
|
||||
|
@ -88,7 +90,7 @@ class GenericAPIView(pagination.PaginationMixin,
|
|||
slug_field = 'slug'
|
||||
allow_empty = True
|
||||
|
||||
def get_serializer_context(self):
|
||||
def get_extra_context(self):
|
||||
"""
|
||||
Extra context provided to the serializer class.
|
||||
"""
|
||||
|
@ -101,14 +103,24 @@ class GenericAPIView(pagination.PaginationMixin,
|
|||
def get_serializer(self, instance=None, data=None,
|
||||
files=None, many=False, partial=False):
|
||||
"""
|
||||
Return the serializer instance that should be used for validating and
|
||||
deserializing input, and for serializing output.
|
||||
Return the serializer instance that should be used for deserializing
|
||||
input, and for serializing output.
|
||||
"""
|
||||
serializer_class = self.get_serializer_class()
|
||||
context = self.get_serializer_context()
|
||||
context = self.get_extra_context()
|
||||
return serializer_class(instance, data=data, files=files,
|
||||
many=many, partial=partial, context=context)
|
||||
|
||||
def get_validator(self, instance=None, data=None,
|
||||
files=None, many=False, partial=False):
|
||||
"""
|
||||
Return the validator instance that should be used for validating the
|
||||
input, and for serializing output.
|
||||
"""
|
||||
validator_class = self.get_validator_class()
|
||||
context = self.get_extra_context()
|
||||
return validator_class(instance, data=data, files=files,
|
||||
many=many, partial=partial, context=context)
|
||||
|
||||
def filter_queryset(self, queryset, filter_backends=None):
|
||||
"""
|
||||
|
@ -119,7 +131,7 @@ class GenericAPIView(pagination.PaginationMixin,
|
|||
method if you want to apply the configured filtering backend to the
|
||||
default queryset.
|
||||
"""
|
||||
#NOTE TAIGA: Added filter_backends to overwrite the default behavior.
|
||||
# NOTE TAIGA: Added filter_backends to overwrite the default behavior.
|
||||
|
||||
backends = filter_backends or self.get_filter_backends()
|
||||
for backend in backends:
|
||||
|
@ -160,6 +172,22 @@ class GenericAPIView(pagination.PaginationMixin,
|
|||
model = self.model
|
||||
return DefaultSerializer
|
||||
|
||||
def get_validator_class(self):
|
||||
validator_class = self.validator_class
|
||||
serializer_class = self.get_serializer_class()
|
||||
|
||||
# Situations where the validator is the rest framework serializer
|
||||
if validator_class is None and serializer_class is not None:
|
||||
return serializer_class
|
||||
|
||||
if validator_class is not None:
|
||||
return validator_class
|
||||
|
||||
class DefaultValidator(self.model_validator_class):
|
||||
class Meta:
|
||||
model = self.model
|
||||
return DefaultValidator
|
||||
|
||||
def get_queryset(self):
|
||||
"""
|
||||
Get the list of items for this view.
|
||||
|
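A minimal sketch of how a view could opt into the serializer/validator split introduced above (view, model and class names are hypothetical):

    class EpicViewSet(GenericAPIView):        # hypothetical view and model
        queryset = Epic.objects.all()
        serializer_class = EpicSerializer     # used for output
        validator_class = EpicValidator       # used to validate input
        # If validator_class is left as None, get_validator_class() falls back
        # to serializer_class, preserving the previous behaviour.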
|
|
@ -44,12 +44,12 @@
|
|||
|
||||
import warnings
|
||||
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.http import Http404
|
||||
from django.db import transaction as tx
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.base import response
|
||||
from taiga.base.exceptions import ValidationError
|
||||
|
||||
from .settings import api_settings
|
||||
from .utils import get_object_or_404
|
||||
|
@ -57,6 +57,7 @@ from .utils import get_object_or_404
|
|||
from .. import exceptions as exc
|
||||
from ..decorators import model_pk_lock
|
||||
|
||||
|
||||
def _get_validation_exclusions(obj, pk=None, slug_field=None, lookup_field=None):
|
||||
"""
|
||||
Given a model instance, and an optional pk and slug field,
|
||||
|
@ -89,19 +90,21 @@ class CreateModelMixin:
|
|||
Create a model instance.
|
||||
"""
|
||||
def create(self, request, *args, **kwargs):
|
||||
serializer = self.get_serializer(data=request.DATA, files=request.FILES)
|
||||
validator = self.get_validator(data=request.DATA, files=request.FILES)
|
||||
|
||||
if serializer.is_valid():
|
||||
self.check_permissions(request, 'create', serializer.object)
|
||||
if validator.is_valid():
|
||||
self.check_permissions(request, 'create', validator.object)
|
||||
|
||||
self.pre_save(serializer.object)
|
||||
self.pre_conditions_on_save(serializer.object)
|
||||
self.object = serializer.save(force_insert=True)
|
||||
self.pre_save(validator.object)
|
||||
self.pre_conditions_on_save(validator.object)
|
||||
self.object = validator.save(force_insert=True)
|
||||
self.post_save(self.object, created=True)
|
||||
instance = self.get_queryset().get(id=self.object.id)
|
||||
serializer = self.get_serializer(instance)
|
||||
headers = self.get_success_headers(serializer.data)
|
||||
return response.Created(serializer.data, headers=headers)
|
||||
|
||||
return response.BadRequest(serializer.errors)
|
||||
return response.BadRequest(validator.errors)
|
||||
|
||||
def get_success_headers(self, data):
|
||||
try:
|
||||
|
@ -171,28 +174,32 @@ class UpdateModelMixin:
|
|||
if self.object is None:
|
||||
raise Http404
|
||||
|
||||
serializer = self.get_serializer(self.object, data=request.DATA,
|
||||
files=request.FILES, partial=partial)
|
||||
validator = self.get_validator(self.object, data=request.DATA,
|
||||
files=request.FILES, partial=partial)
|
||||
|
||||
if not serializer.is_valid():
|
||||
return response.BadRequest(serializer.errors)
|
||||
if not validator.is_valid():
|
||||
return response.BadRequest(validator.errors)
|
||||
|
||||
# Hooks
|
||||
try:
|
||||
self.pre_save(serializer.object)
|
||||
self.pre_conditions_on_save(serializer.object)
|
||||
self.pre_save(validator.object)
|
||||
self.pre_conditions_on_save(validator.object)
|
||||
except ValidationError as err:
|
||||
# full_clean on model instance may be called in pre_save,
|
||||
# so we have to handle eventual errors.
|
||||
return response.BadRequest(err.message_dict)
|
||||
|
||||
if self.object is None:
|
||||
self.object = serializer.save(force_insert=True)
|
||||
self.object = validator.save(force_insert=True)
|
||||
self.post_save(self.object, created=True)
|
||||
instance = self.get_queryset().get(id=self.object.id)
|
||||
serializer = self.get_serializer(instance)
|
||||
return response.Created(serializer.data)
|
||||
|
||||
self.object = serializer.save(force_update=True)
|
||||
self.object = validator.save(force_update=True)
|
||||
self.post_save(self.object, created=False)
|
||||
instance = self.get_queryset().get(id=self.object.id)
|
||||
serializer = self.get_serializer(instance)
|
||||
return response.Ok(serializer.data)
|
||||
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
|
@ -204,14 +211,14 @@ class UpdateModelMixin:
|
|||
Set any attributes on the object that are implicit in the request.
|
||||
"""
|
||||
# pk and/or slug attributes are implicit in the URL.
|
||||
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
|
||||
lookup = self.kwargs.get(lookup_url_kwarg, None)
|
||||
##lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
|
||||
##lookup = self.kwargs.get(lookup_url_kwarg, None)
|
||||
pk = self.kwargs.get(self.pk_url_kwarg, None)
|
||||
slug = self.kwargs.get(self.slug_url_kwarg, None)
|
||||
slug_field = slug and self.slug_field or None
|
||||
|
||||
if lookup:
|
||||
setattr(obj, self.lookup_field, lookup)
|
||||
##if lookup:
|
||||
## setattr(obj, self.lookup_field, lookup)
|
||||
|
||||
if pk:
|
||||
setattr(obj, 'pk', pk)
|
||||
|
@ -246,12 +253,33 @@ class DestroyModelMixin:
|
|||
return response.NoContent()
|
||||
|
||||
|
||||
class NestedViewSetMixin(object):
|
||||
def get_queryset(self):
|
||||
return self._filter_queryset_by_parents_lookups(super().get_queryset())
|
||||
|
||||
def _filter_queryset_by_parents_lookups(self, queryset):
|
||||
parents_query_dict = self._get_parents_query_dict()
|
||||
if parents_query_dict:
|
||||
return queryset.filter(**parents_query_dict)
|
||||
else:
|
||||
return queryset
|
||||
|
||||
def _get_parents_query_dict(self):
|
||||
result = {}
|
||||
for kwarg_name in self.kwargs:
|
||||
query_value = self.kwargs.get(kwarg_name)
|
||||
result[kwarg_name] = query_value
|
||||
return result
|
||||
|
||||
|
||||
## TODO: Move blocked mixin out of the base module because it is related to projects
|
||||
|
||||
class BlockeableModelMixin:
|
||||
def is_blocked(self, obj):
|
||||
raise NotImplementedError("is_blocked must be overridden")
|
||||
|
||||
def pre_conditions_blocked(self, obj):
|
||||
#Raises permission exception
|
||||
# Raises permission exception
|
||||
if obj is not None and self.is_blocked(obj):
|
||||
raise exc.Blocked(_("Blocked element"))
|
||||
|
||||
|
|
|
@ -21,11 +21,12 @@ import abc
|
|||
from functools import reduce
|
||||
|
||||
from taiga.base.utils import sequence as sq
|
||||
from taiga.permissions.service import user_has_perm, is_project_admin
|
||||
from taiga.permissions.services import user_has_perm, is_project_admin
|
||||
from django.apps import apps
|
||||
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
|
||||
######################################################################
|
||||
# Base permissions definition
|
||||
######################################################################
|
||||
|
@ -180,33 +181,6 @@ class HasProjectPerm(PermissionComponent):
|
|||
return user_has_perm(request.user, self.project_perm, obj)
|
||||
|
||||
|
||||
class HasProjectParamAndPerm(PermissionComponent):
|
||||
def __init__(self, perm, *components):
|
||||
self.project_perm = perm
|
||||
super().__init__(*components)
|
||||
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
Project = apps.get_model('projects', 'Project')
|
||||
project_id = request.QUERY_PARAMS.get("project", None)
|
||||
try:
|
||||
project = Project.objects.get(pk=project_id)
|
||||
except Project.DoesNotExist:
|
||||
return False
|
||||
return user_has_perm(request.user, self.project_perm, project)
|
||||
|
||||
|
||||
class HasMandatoryParam(PermissionComponent):
|
||||
def __init__(self, param, *components):
|
||||
self.mandatory_param = param
|
||||
super().__init__(*components)
|
||||
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
param = request.GET.get(self.mandatory_param, None)
|
||||
if param:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class IsProjectAdmin(PermissionComponent):
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
return is_project_admin(request.user, obj)
|
||||
|
@ -214,6 +188,9 @@ class IsProjectAdmin(PermissionComponent):
|
|||
|
||||
class IsObjectOwner(PermissionComponent):
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
if obj.owner is None:
|
||||
return False
|
||||
|
||||
return obj.owner == request.user
|
||||
|
||||
|
||||
|
|
|
@ -48,7 +48,7 @@ Serializer fields that deal with relationships.
|
|||
These fields allow you to specify the style that should be used to represent
|
||||
model relationships, including hyperlinks, primary keys, or slugs.
|
||||
"""
|
||||
from django.core.exceptions import ObjectDoesNotExist, ValidationError
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.urlresolvers import resolve, get_script_prefix, NoReverseMatch
|
||||
from django import forms
|
||||
from django.db.models.fields import BLANK_CHOICE_DASH
|
||||
|
@ -59,6 +59,7 @@ from django.utils.translation import ugettext_lazy as _
|
|||
|
||||
from .fields import Field, WritableField, get_component, is_simple_callable
|
||||
from .reverse import reverse
|
||||
from taiga.base.exceptions import ValidationError
|
||||
|
||||
import warnings
|
||||
from urllib import parse as urlparse
|
||||
|
|
|
@ -69,6 +69,7 @@ import copy
|
|||
import datetime
|
||||
import inspect
|
||||
import types
|
||||
import serpy
|
||||
|
||||
# Note: We do the following so that users of the framework can use this style:
|
||||
#
|
||||
|
@ -77,6 +78,8 @@ import types
|
|||
# This helps keep the separation between model fields, form fields, and
|
||||
# serializer fields more explicit.
|
||||
|
||||
from taiga.base.exceptions import ValidationError
|
||||
|
||||
from .relations import *
|
||||
from .fields import *
|
||||
|
||||
|
@ -1220,3 +1223,27 @@ class HyperlinkedModelSerializer(ModelSerializer):
|
|||
"model_name": model_meta.object_name.lower()
|
||||
}
|
||||
return self._default_view_name % format_kwargs
|
||||
|
||||
|
||||
class LightSerializer(serpy.Serializer):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs.pop("read_only", None)
|
||||
kwargs.pop("partial", None)
|
||||
kwargs.pop("files", None)
|
||||
context = kwargs.pop("context", {})
|
||||
view = kwargs.pop("view", {})
|
||||
super().__init__(*args, **kwargs)
|
||||
self.context = context
|
||||
self.view = view
|
||||
|
||||
|
||||
class LightDictSerializer(serpy.DictSerializer):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs.pop("read_only", None)
|
||||
kwargs.pop("partial", None)
|
||||
kwargs.pop("files", None)
|
||||
context = kwargs.pop("context", {})
|
||||
view = kwargs.pop("view", {})
|
||||
super().__init__(*args, **kwargs)
|
||||
self.context = context
|
||||
self.view = view
|
||||
|
|
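A small sketch of a serpy-based serializer built on LightSerializer (model and field names are hypothetical; Field and MethodField refer to the serpy wrappers added in taiga/base/fields.py further down in this diff):

    class UserLightSerializer(LightSerializer):
        id = Field()
        username = Field()
        full_name = MethodField()            # serpy resolves get_full_name by default

        def get_full_name(self, obj):
            return obj.get_full_name()

    # data is a plain list of dicts, cheap to build compared to DRF serializers.
    data = UserLightSerializer(users_queryset, many=True).data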
|
@ -98,6 +98,8 @@ DEFAULTS = {
|
|||
# Generic view behavior
|
||||
"DEFAULT_MODEL_SERIALIZER_CLASS":
|
||||
"taiga.base.api.serializers.ModelSerializer",
|
||||
"DEFAULT_MODEL_VALIDATOR_CLASS":
|
||||
"taiga.base.api.validators.ModelValidator",
|
||||
"DEFAULT_FILTER_BACKENDS": (),
|
||||
|
||||
# Throttling
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
|
@ -15,12 +16,12 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from taiga.permissions import service
|
||||
from taiga.users.models import Role
|
||||
from . import serializers
|
||||
|
||||
|
||||
def test_role_has_perm():
|
||||
role = Role()
|
||||
role.permissions = ["test"]
|
||||
assert service.role_has_perm(role, "test")
|
||||
assert service.role_has_perm(role, "false") is False
|
||||
class Validator(serializers.Serializer):
|
||||
pass
|
||||
|
||||
|
||||
class ModelValidator(serializers.ModelSerializer):
|
||||
pass
|
|
@ -134,6 +134,25 @@ class ViewSetMixin(object):
|
|||
return super().check_permissions(request, action=action, obj=obj)
|
||||
|
||||
|
||||
class NestedViewSetMixin(object):
|
||||
def get_queryset(self):
|
||||
return self._filter_queryset_by_parents_lookups(super().get_queryset())
|
||||
|
||||
def _filter_queryset_by_parents_lookups(self, queryset):
|
||||
parents_query_dict = self._get_parents_query_dict()
|
||||
if parents_query_dict:
|
||||
return queryset.filter(**parents_query_dict)
|
||||
else:
|
||||
return queryset
|
||||
|
||||
def _get_parents_query_dict(self):
|
||||
result = {}
|
||||
for kwarg_name in self.kwargs:
|
||||
query_value = self.kwargs.get(kwarg_name)
|
||||
result[kwarg_name] = query_value
|
||||
return result
|
||||
|
||||
|
||||
class ViewSet(ViewSetMixin, views.APIView):
|
||||
"""
|
||||
The base ViewSet class does not provide any actions by default.
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
|
||||
from django_pglocks import advisory_lock
|
||||
|
||||
|
||||
def detail_route(methods=['get'], **kwargs):
|
||||
"""
|
||||
Used to mark a method on a ViewSet that should be routed for detail requests.
|
||||
|
@ -51,12 +52,11 @@ def model_pk_lock(func):
|
|||
"""
|
||||
def decorator(self, *args, **kwargs):
|
||||
from taiga.base.utils.db import get_typename_for_model_class
|
||||
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
|
||||
pk = self.kwargs.get(self.pk_url_kwarg, None)
|
||||
tn = get_typename_for_model_class(self.get_queryset().model)
|
||||
key = "{0}:{1}".format(tn, pk)
|
||||
|
||||
with advisory_lock(key) as acquired_key_lock:
|
||||
with advisory_lock(key):
|
||||
return func(self, *args, **kwargs)
|
||||
|
||||
return decorator
|
||||
|
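A hedged usage sketch of the decorator above (viewset and base-class names are assumptions); the advisory lock key combines the model typename with the pk taken from the URL kwargs:

    class TaskViewSet(ModelCrudViewSet):     # hypothetical viewset
        @model_pk_lock
        def update(self, request, *args, **kwargs):
            # Concurrent updates of the same task pk serialize on the pg
            # advisory lock taken inside the decorator.
            return super().update(request, *args, **kwargs)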
|
|
@ -51,6 +51,7 @@ In addition Django's built in 403 and 404 exceptions are handled.
|
|||
"""
|
||||
|
||||
from django.core.exceptions import PermissionDenied as DjangoPermissionDenied
|
||||
from django.core.exceptions import ValidationError as DjangoValidationError
|
||||
from django.utils.encoding import force_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.http import Http404
|
||||
|
@ -224,6 +225,7 @@ class NotEnoughSlotsForProject(BaseException):
|
|||
"total_memberships": total_memberships
|
||||
}
|
||||
|
||||
|
||||
def format_exception(exc):
|
||||
if isinstance(exc.detail, (dict, list, tuple,)):
|
||||
detail = exc.detail
|
||||
|
@ -270,3 +272,6 @@ def exception_handler(exc):
|
|||
|
||||
# Note: Unhandled exceptions will raise a 500 error.
|
||||
return None
|
||||
|
||||
|
||||
ValidationError = DjangoValidationError
|
||||
|
|
|
@ -18,13 +18,17 @@
|
|||
|
||||
from django.forms import widgets
|
||||
from django.utils.translation import ugettext as _
|
||||
from taiga.base.api import serializers, ISO_8601
|
||||
from taiga.base.api.settings import api_settings
|
||||
|
||||
from taiga.base.api import serializers
|
||||
import serpy
|
||||
|
||||
|
||||
####################################################################
|
||||
## Serializer fields
|
||||
# DRF Serializer fields (OLD)
|
||||
####################################################################
|
||||
# NOTE: This should be in other place, for example taiga.base.api.serializers
|
||||
|
||||
|
||||
class JsonField(serializers.WritableField):
|
||||
"""
|
||||
|
@ -39,40 +43,6 @@ class JsonField(serializers.WritableField):
|
|||
return data
|
||||
|
||||
|
||||
class I18NJsonField(JsonField):
|
||||
"""
|
||||
Json objects serializer.
|
||||
"""
|
||||
widget = widgets.Textarea
|
||||
|
||||
def __init__(self, i18n_fields=(), *args, **kwargs):
|
||||
super(I18NJsonField, self).__init__(*args, **kwargs)
|
||||
self.i18n_fields = i18n_fields
|
||||
|
||||
def translate_values(self, d):
|
||||
i18n_d = {}
|
||||
if d is None:
|
||||
return d
|
||||
|
||||
for key, value in d.items():
|
||||
if isinstance(value, dict):
|
||||
i18n_d[key] = self.translate_values(value)
|
||||
|
||||
if key in self.i18n_fields:
|
||||
if isinstance(value, list):
|
||||
i18n_d[key] = [e is not None and _(str(e)) or e for e in value]
|
||||
if isinstance(value, str):
|
||||
i18n_d[key] = value is not None and _(value) or value
|
||||
else:
|
||||
i18n_d[key] = value
|
||||
|
||||
return i18n_d
|
||||
|
||||
def to_native(self, obj):
|
||||
i18n_obj = self.translate_values(obj)
|
||||
return i18n_obj
|
||||
|
||||
|
||||
class PgArrayField(serializers.WritableField):
|
||||
"""
|
||||
PgArray objects serializer.
|
||||
|
@ -99,38 +69,81 @@ class PickledObjectField(serializers.WritableField):
|
|||
return data
|
||||
|
||||
|
||||
class TagsField(serializers.WritableField):
|
||||
"""
|
||||
Tags serializer.
|
||||
"""
|
||||
def to_native(self, obj):
|
||||
return obj
|
||||
|
||||
def from_native(self, data):
|
||||
if not data:
|
||||
return data
|
||||
|
||||
ret = sum([tag.split(",") for tag in data], [])
|
||||
return ret
|
||||
|
||||
|
||||
class TagsColorsField(serializers.WritableField):
|
||||
"""
|
||||
Tags colors serializer.
|
||||
"""
|
||||
widget = widgets.Textarea
|
||||
|
||||
def to_native(self, obj):
|
||||
return dict(obj)
|
||||
|
||||
def from_native(self, data):
|
||||
return list(data.items())
|
||||
|
||||
|
||||
|
||||
class WatchersField(serializers.WritableField):
|
||||
def to_native(self, obj):
|
||||
return obj
|
||||
|
||||
def from_native(self, data):
|
||||
return data
|
||||
|
||||
|
||||
####################################################################
|
||||
# Serpy fields (NEW)
|
||||
####################################################################
|
||||
|
||||
class Field(serpy.Field):
|
||||
pass
|
||||
|
||||
|
||||
class MethodField(serpy.MethodField):
|
||||
pass
|
||||
|
||||
|
||||
class I18NField(Field):
|
||||
def to_value(self, value):
|
||||
ret = super(I18NField, self).to_value(value)
|
||||
return _(ret)
|
||||
|
||||
|
||||
class I18NJsonField(Field):
|
||||
"""
|
||||
Json objects serializer.
|
||||
"""
|
||||
def __init__(self, i18n_fields=(), *args, **kwargs):
|
||||
super(I18NJsonField, self).__init__(*args, **kwargs)
|
||||
self.i18n_fields = i18n_fields
|
||||
|
||||
def translate_values(self, d):
|
||||
i18n_d = {}
|
||||
if d is None:
|
||||
return d
|
||||
|
||||
for key, value in d.items():
|
||||
if isinstance(value, dict):
|
||||
i18n_d[key] = self.translate_values(value)
|
||||
|
||||
if key in self.i18n_fields:
|
||||
if isinstance(value, list):
|
||||
i18n_d[key] = [e is not None and _(str(e)) or e for e in value]
|
||||
if isinstance(value, str):
|
||||
i18n_d[key] = value is not None and _(value) or value
|
||||
else:
|
||||
i18n_d[key] = value
|
||||
|
||||
return i18n_d
|
||||
|
||||
def to_native(self, obj):
|
||||
i18n_obj = self.translate_values(obj)
|
||||
return i18n_obj
|
||||
|
||||
|
||||
class FileField(Field):
|
||||
def to_value(self, value):
|
||||
if value:
|
||||
return value.name
|
||||
return None
|
||||
|
||||
|
||||
class DateTimeField(Field):
|
||||
format = api_settings.DATETIME_FORMAT
|
||||
|
||||
def to_value(self, value):
|
||||
if value is None or self.format is None:
|
||||
return value
|
||||
|
||||
if self.format.lower() == ISO_8601:
|
||||
ret = value.isoformat()
|
||||
if ret.endswith("+00:00"):
|
||||
ret = ret[:-6] + "Z"
|
||||
return ret
|
||||
return value.strftime(self.format)
|
||||
|
|
|
@ -18,6 +18,8 @@
|
|||
|
||||
import logging
|
||||
|
||||
from dateutil.parser import parse as parse_date
|
||||
|
||||
from django.apps import apps
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db.models import Q
|
||||
|
@ -30,7 +32,6 @@ from taiga.base.utils.db import to_tsquery
|
|||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
||||
#####################################################################
|
||||
# Base and Mixins
|
||||
#####################################################################
|
||||
|
@ -152,13 +153,17 @@ class PermissionBasedFilterBackend(FilterBackend):
|
|||
else:
|
||||
qs = qs.filter(project__anon_permissions__contains=[self.permission])
|
||||
|
||||
return super().filter_queryset(request, qs.distinct(), view)
|
||||
return super().filter_queryset(request, qs, view)
|
||||
|
||||
|
||||
class CanViewProjectFilterBackend(PermissionBasedFilterBackend):
|
||||
permission = "view_project"
|
||||
|
||||
|
||||
class CanViewEpicsFilterBackend(PermissionBasedFilterBackend):
|
||||
permission = "view_epics"
|
||||
|
||||
|
||||
class CanViewUsFilterBackend(PermissionBasedFilterBackend):
|
||||
permission = "view_us"
|
||||
|
||||
|
@ -197,6 +202,10 @@ class PermissionBasedAttachmentFilterBackend(PermissionBasedFilterBackend):
|
|||
return qs.filter(content_type=ct)
|
||||
|
||||
|
||||
class CanViewEpicAttachmentFilterBackend(PermissionBasedAttachmentFilterBackend):
|
||||
permission = "view_epics"
|
||||
|
||||
|
||||
class CanViewUserStoryAttachmentFilterBackend(PermissionBasedAttachmentFilterBackend):
|
||||
permission = "view_us"
|
||||
|
||||
|
@ -229,7 +238,7 @@ class MembersFilterBackend(PermissionBasedFilterBackend):
|
|||
project_id = int(request.QUERY_PARAMS["project"])
|
||||
except:
|
||||
logger.error("Filtering project with a value different from an integer: {}".format(
|
||||
request.QUERY_PARAMS["project"]))
|
||||
request.QUERY_PARAMS["project"]))
|
||||
raise exc.BadRequest(_("'project' must be an integer value."))
|
||||
|
||||
if project_id:
|
||||
|
@ -256,14 +265,14 @@ class MembersFilterBackend(PermissionBasedFilterBackend):
|
|||
|
||||
q = Q(memberships__project_id__in=projects_list) | Q(id=request.user.id)
|
||||
|
||||
#If there is no selected project we want access to users from public projects
|
||||
# If there is no selected project we want access to users from public projects
|
||||
if not project:
|
||||
q = q | Q(memberships__project__public_permissions__contains=[self.permission])
|
||||
|
||||
qs = qs.filter(q)
|
||||
|
||||
else:
|
||||
if project and not "view_project" in project.anon_permissions:
|
||||
if project and "view_project" not in project.anon_permissions:
|
||||
qs = qs.none()
|
||||
|
||||
qs = qs.filter(memberships__project__anon_permissions__contains=[self.permission])
|
||||
|
@ -307,7 +316,7 @@ class IsProjectAdminFilterBackend(FilterBackend, BaseIsProjectAdminFilterBackend
|
|||
else:
|
||||
queryset = queryset.filter(project_id__in=project_ids)
|
||||
|
||||
return super().filter_queryset(request, queryset.distinct(), view)
|
||||
return super().filter_queryset(request, queryset, view)
|
||||
|
||||
|
||||
class IsProjectAdminFromWebhookLogFilterBackend(FilterBackend, BaseIsProjectAdminFilterBackend):
|
||||
|
@ -328,10 +337,16 @@ class IsProjectAdminFromWebhookLogFilterBackend(FilterBackend, BaseIsProjectAdmi
|
|||
#####################################################################
|
||||
|
||||
class BaseRelatedFieldsFilter(FilterBackend):
|
||||
def __init__(self, filter_name=None):
|
||||
filter_name = None
|
||||
param_name = None
|
||||
|
||||
def __init__(self, filter_name=None, param_name=None):
|
||||
if filter_name:
|
||||
self.filter_name = filter_name
|
||||
|
||||
if param_name:
|
||||
self.param_name = param_name
|
||||
|
||||
def _prepare_filter_data(self, query_param_value):
|
||||
def _transform_value(value):
|
||||
try:
|
||||
|
@ -346,7 +361,8 @@ class BaseRelatedFieldsFilter(FilterBackend):
|
|||
return list(values)
|
||||
|
||||
def _get_queryparams(self, params):
|
||||
raw_value = params.get(self.filter_name, None)
|
||||
param_name = self.param_name or self.filter_name
|
||||
raw_value = params.get(param_name, None)
|
||||
|
||||
if raw_value:
|
||||
value = self._prepare_filter_data(raw_value)
|
||||
|
@ -433,13 +449,14 @@ class WatchersFilter(FilterBackend):
|
|||
|
||||
def filter_queryset(self, request, queryset, view):
|
||||
query_watchers = self._get_watchers_queryparams(request.QUERY_PARAMS)
|
||||
model = queryset.model
|
||||
if query_watchers:
|
||||
WatchedModel = apps.get_model("notifications", "Watched")
|
||||
watched_type = ContentType.objects.get_for_model(queryset.model)
|
||||
|
||||
try:
|
||||
watched_ids = WatchedModel.objects.filter(content_type=watched_type, user__id__in=query_watchers).values_list("object_id", flat=True)
|
||||
watched_ids = (WatchedModel.objects.filter(content_type=watched_type,
|
||||
user__id__in=query_watchers)
|
||||
.values_list("object_id", flat=True))
|
||||
queryset = queryset.filter(id__in=watched_ids)
|
||||
except ValueError:
|
||||
raise exc.BadRequest(_("Error in filter params types."))
|
||||
|
@ -447,6 +464,68 @@ class WatchersFilter(FilterBackend):
|
|||
return super().filter_queryset(request, queryset, view)
|
||||
|
||||
|
||||
class BaseCompareFilter(FilterBackend):
|
||||
operators = ["", "lt", "gt", "lte", "gte"]
|
||||
|
||||
def __init__(self, filter_name_base=None, operators=None):
|
||||
if filter_name_base:
|
||||
self.filter_name_base = filter_name_base
|
||||
|
||||
def _get_filter_names(self):
|
||||
return [
|
||||
self._get_filter_name(operator)
|
||||
for operator in self.operators
|
||||
]
|
||||
|
||||
def _get_filter_name(self, operator):
|
||||
if operator and len(operator) > 0:
|
||||
return "{base}__{operator}".format(
|
||||
base=self.filter_name_base, operator=operator
|
||||
)
|
||||
else:
|
||||
return self.filter_name_base
|
||||
|
||||
def _get_constraints(self, params):
|
||||
constraints = {}
|
||||
for filter_name in self._get_filter_names():
|
||||
raw_value = params.get(filter_name, None)
|
||||
if raw_value is not None:
|
||||
constraints[filter_name] = self._get_value(raw_value)
|
||||
return constraints
|
||||
|
||||
def _get_value(self, raw_value):
|
||||
return raw_value
|
||||
|
||||
def filter_queryset(self, request, queryset, view):
|
||||
constraints = self._get_constraints(request.QUERY_PARAMS)
|
||||
|
||||
if len(constraints) > 0:
|
||||
queryset = queryset.filter(**constraints)
|
||||
|
||||
return super().filter_queryset(request, queryset, view)
|
||||
|
||||
|
||||
class BaseDateFilter(BaseCompareFilter):
|
||||
def _get_value(self, raw_value):
|
||||
return parse_date(raw_value)
|
||||
|
||||
|
||||
class CreatedDateFilter(BaseDateFilter):
|
||||
filter_name_base = "created_date"
|
||||
|
||||
|
||||
class ModifiedDateFilter(BaseDateFilter):
|
||||
filter_name_base = "modified_date"
|
||||
|
||||
|
||||
class FinishedDateFilter(BaseDateFilter):
|
||||
filter_name_base = "finished_date"
|
||||
|
||||
|
||||
class FinishDateFilter(BaseDateFilter):
|
||||
filter_name_base = "finish_date"
|
||||
|
||||
|
||||
#####################################################################
|
||||
# Text search filters
|
||||
#####################################################################
|
||||
|
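A sketch of wiring the new date filters into a view and the query parameters they accept (view name, base class and URL are assumptions; the parameter names follow _get_filter_name above):

    class TasksViewSet(ModelCrudViewSet):    # hypothetical view
        filter_backends = (CreatedDateFilter, ModifiedDateFilter, FinishedDateFilter)

    # e.g. GET /api/v1/tasks?created_date__gte=2016-09-01&modified_date__lt=2016-10-01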
@ -459,6 +538,7 @@ class QFilter(FilterBackend):
|
|||
where_clause = ("""
|
||||
to_tsvector('english_nostop',
|
||||
coalesce({table}.subject, '') || ' ' ||
|
||||
coalesce(array_to_string({table}.tags, ' '), '') || ' ' ||
|
||||
coalesce({table}.ref) || ' ' ||
|
||||
coalesce({table}.description, '')) @@ to_tsquery('english_nostop', %s)
|
||||
""".format(table=table))
|
||||
|
|
|
@ -25,7 +25,7 @@ COORS_ALLOWED_METHODS = ["POST", "GET", "OPTIONS", "PUT", "DELETE", "PATCH", "HE
|
|||
COORS_ALLOWED_HEADERS = ["content-type", "x-requested-with",
|
||||
"authorization", "accept-encoding",
|
||||
"x-disable-pagination", "x-lazy-pagination",
|
||||
"x-host", "x-session-id"]
|
||||
"x-host", "x-session-id", "set-orders"]
|
||||
COORS_ALLOWED_CREDENTIALS = True
|
||||
COORS_EXPOSE_HEADERS = ["x-pagination-count", "x-paginated", "x-paginated-by",
|
||||
"x-pagination-current", "x-pagination-next", "x-pagination-prev",
|
||||
|
|
|
@ -23,6 +23,7 @@ from django.db import connection
|
|||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.db.models.sql.datastructures import EmptyResultSet
|
||||
from taiga.base.api import serializers
|
||||
from taiga.base.fields import Field, MethodField
|
||||
|
||||
Neighbor = namedtuple("Neighbor", "left right")
|
||||
|
||||
|
@ -71,7 +72,6 @@ def get_neighbors(obj, results_set=None):
|
|||
if row is None:
|
||||
return Neighbor(None, None)
|
||||
|
||||
obj_position = row[1] - 1
|
||||
left_object_id = row[2]
|
||||
right_object_id = row[3]
|
||||
|
||||
|
@ -88,13 +88,19 @@ def get_neighbors(obj, results_set=None):
|
|||
return Neighbor(left, right)
|
||||
|
||||
|
||||
class NeighborsSerializerMixin:
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.fields["neighbors"] = serializers.SerializerMethodField("get_neighbors")
|
||||
class NeighborSerializer(serializers.LightSerializer):
|
||||
id = Field()
|
||||
ref = Field()
|
||||
subject = Field()
|
||||
|
||||
|
||||
class NeighborsSerializerMixin(serializers.LightSerializer):
|
||||
neighbors = MethodField()
|
||||
|
||||
def serialize_neighbor(self, neighbor):
|
||||
raise NotImplementedError
|
||||
if neighbor:
|
||||
return NeighborSerializer(neighbor).data
|
||||
return None
|
||||
|
||||
def get_neighbors(self, obj):
|
||||
view, request = self.context.get("view", None), self.context.get("request", None)
|
||||
|
|
|
@ -318,7 +318,58 @@ class DRFDefaultRouter(SimpleRouter):
|
|||
return urls
|
||||
|
||||
|
||||
class DefaultRouter(DRFDefaultRouter):
|
||||
class NestedRegistryItem(object):
|
||||
def __init__(self, router, parent_prefix, parent_item=None):
|
||||
self.router = router
|
||||
self.parent_prefix = parent_prefix
|
||||
self.parent_item = parent_item
|
||||
|
||||
def register(self, prefix, viewset, base_name, parents_query_lookups):
|
||||
self.router._register(
|
||||
prefix=self.get_prefix(current_prefix=prefix, parents_query_lookups=parents_query_lookups),
|
||||
viewset=viewset,
|
||||
base_name=base_name,
|
||||
)
|
||||
return NestedRegistryItem(
|
||||
router=self.router,
|
||||
parent_prefix=prefix,
|
||||
parent_item=self
|
||||
)
|
||||
|
||||
def get_prefix(self, current_prefix, parents_query_lookups):
|
||||
return "{0}/{1}".format(
|
||||
self.get_parent_prefix(parents_query_lookups),
|
||||
current_prefix
|
||||
)
|
||||
|
||||
def get_parent_prefix(self, parents_query_lookups):
|
||||
prefix = "/"
|
||||
current_item = self
|
||||
i = len(parents_query_lookups) - 1
|
||||
while current_item:
|
||||
prefix = "{parent_prefix}/(?P<{parent_pk_kwarg_name}>[^/.]+)/{prefix}".format(
|
||||
parent_prefix=current_item.parent_prefix,
|
||||
parent_pk_kwarg_name=parents_query_lookups[i],
|
||||
prefix=prefix
|
||||
)
|
||||
i -= 1
|
||||
current_item = current_item.parent_item
|
||||
return prefix.strip("/")
|
||||
|
||||
|
||||
class NestedRouterMixin:
|
||||
def _register(self, *args, **kwargs):
|
||||
return super().register(*args, **kwargs)
|
||||
|
||||
def register(self, *args, **kwargs):
|
||||
self._register(*args, **kwargs)
|
||||
return NestedRegistryItem(
|
||||
router=self,
|
||||
parent_prefix=self.registry[-1][0]
|
||||
)
|
||||
|
||||
|
||||
class DefaultRouter(NestedRouterMixin, DRFDefaultRouter):
|
||||
pass
|
||||
|
||||
__all__ = ["DefaultRouter"]
|
||||
|
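A minimal registration sketch for the nested router above (viewset names and the lookup are hypothetical):

    router = DefaultRouter(trailing_slash=False)

    epics = router.register("epics", EpicViewSet, base_name="epics")
    epics.register("related_userstories", EpicRelatedUserStoryViewSet,
                   base_name="epics-related-userstories",
                   parents_query_lookups=["epic"])
    # Produces URLs matching epics/(?P<epic>[^/.]+)/related_userstories, and
    # NestedViewSetMixin filters the child queryset by the captured kwarg.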
|
|
@ -425,7 +425,7 @@
|
|||
<a href="{{ support_url }}" title="Support page" style="color: #9dce0a">{{ support_url}}</a>
|
||||
<br>
|
||||
<strong>Contact us:</strong>
|
||||
<a href="mailto:{{ support_email }}" title="Supporti email" style="color: #9dce0a">
|
||||
<a href="mailto:{{ support_email }}" title="Support email" style="color: #9dce0a">
|
||||
{{ support_email }}
|
||||
</a>
|
||||
<br>
|
||||
|
|
|
@ -399,7 +399,7 @@
|
|||
<a href="{{ support_url }}" title="Support page" style="color: #9dce0a">{{ support_url}}</a>
|
||||
<br>
|
||||
<strong>Contact us:</strong>
|
||||
<a href="mailto:{{ support_email }}" title="Supporti email" style="color: #9dce0a">
|
||||
<a href="mailto:{{ support_email }}" title="Support email" style="color: #9dce0a">
|
||||
{{ support_email }}
|
||||
</a>
|
||||
<br>
|
||||
|
|
|
@ -461,7 +461,7 @@
|
|||
<a href="{{ support_url }}" title="Support page" style="color: #9dce0a">{{ support_url}}</a>
|
||||
<br>
|
||||
<strong>Contact us:</strong>
|
||||
<a href="mailto:{{ support_email }}" title="Supporti email" style="color: #9dce0a">
|
||||
<a href="mailto:{{ support_email }}" title="Support email" style="color: #9dce0a">
|
||||
{{ support_email }}
|
||||
</a>
|
||||
<br>
|
||||
|
|
|
@ -0,0 +1,80 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import collections
|
||||
|
||||
|
||||
class OrderedSet(collections.MutableSet):
|
||||
# Extract from:
|
||||
# - https://docs.python.org/3/library/collections.abc.html?highlight=orderedset
|
||||
# - https://code.activestate.com/recipes/576694/
|
||||
def __init__(self, iterable=None):
|
||||
self.end = end = []
|
||||
end += [None, end, end] # sentinel node for doubly linked list
|
||||
self.map = {} # key --> [key, prev, next]
|
||||
if iterable is not None:
|
||||
self |= iterable
|
||||
|
||||
def __len__(self):
|
||||
return len(self.map)
|
||||
|
||||
def __contains__(self, key):
|
||||
return key in self.map
|
||||
|
||||
def add(self, key):
|
||||
if key not in self.map:
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
curr[2] = end[1] = self.map[key] = [key, curr, end]
|
||||
|
||||
def discard(self, key):
|
||||
if key in self.map:
|
||||
key, prev, next = self.map.pop(key)
|
||||
prev[2] = next
|
||||
next[1] = prev
|
||||
|
||||
def __iter__(self):
|
||||
end = self.end
|
||||
curr = end[2]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[2]
|
||||
|
||||
def __reversed__(self):
|
||||
end = self.end
|
||||
curr = end[1]
|
||||
while curr is not end:
|
||||
yield curr[0]
|
||||
curr = curr[1]
|
||||
|
||||
def pop(self, last=True):
|
||||
if not self:
|
||||
raise KeyError('set is empty')
|
||||
key = self.end[1][0] if last else self.end[2][0]
|
||||
self.discard(key)
|
||||
return key
|
||||
|
||||
def __repr__(self):
|
||||
if not self:
|
||||
return '%s()' % (self.__class__.__name__,)
|
||||
return '%s(%r)' % (self.__class__.__name__, list(self))
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, OrderedSet):
|
||||
return len(self) == len(other) and list(self) == list(other)
|
||||
return set(self) == set(other)
|
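A quick usage sketch (behaviour follows the linked recipe):

    s = OrderedSet("abracadabra")
    print(s)            # OrderedSet(['a', 'b', 'r', 'c', 'd']) -- insertion order kept
    s.discard("b")
    s.add("z")
    print(list(s))      # ['a', 'r', 'c', 'd', 'z']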
|
@ -0,0 +1,56 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import random

from django.conf import settings


DEFAULT_PREDEFINED_COLORS = (
    "#fce94f",
    "#edd400",
    "#c4a000",
    "#8ae234",
    "#73d216",
    "#4e9a06",
    "#d3d7cf",
    "#fcaf3e",
    "#f57900",
    "#ce5c00",
    "#729fcf",
    "#3465a4",
    "#204a87",
    "#888a85",
    "#ad7fa8",
    "#75507b",
    "#5c3566",
    "#ef2929",
    "#cc0000",
    "#a40000"
)

PREDEFINED_COLORS = getattr(settings, "PREDEFINED_COLORS", DEFAULT_PREDEFINED_COLORS)


def generate_random_hex_color():
    return "#{:06x}".format(random.randint(0,0xFFFFFF))


def generate_random_predefined_hex_color():
    return random.choice(PREDEFINED_COLORS)
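A quick sketch of how these color helpers behave (illustrative only; it assumes a configured Django settings module, and the import path is not shown in this diff):

    # Hypothetical import path for the new module above.
    # from <new module> import generate_random_hex_color, generate_random_predefined_hex_color

    generate_random_hex_color()             # e.g. "#3fa2c7" -- any value from "#000000" to "#ffffff"
    generate_random_predefined_hex_color()  # one of the 20 PREDEFINED_COLORS entries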
@ -17,6 +17,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from django.contrib.contenttypes.models import ContentType
from django.db import connection
from django.db import transaction
from django.shortcuts import _get_queryset

@ -26,6 +27,7 @@ from . import functions

import re


def get_object_or_none(klass, *args, **kwargs):
    """
    Uses get() to return an object, or None if the object does not exist.

@ -81,6 +83,7 @@ def save_in_bulk(instances, callback=None, precall=None, **save_options):
    :params callback: Callback to call after each save.
    :params save_options: Additional options to use when saving each instance.
    """
    ret = []
    if callback is None:
        callback = functions.noop

@ -96,6 +99,7 @@ def save_in_bulk(instances, callback=None, precall=None, **save_options):
        instance.save(**save_options)
        callback(instance, created=created)

    return ret


@transaction.atomic
def update_in_bulk(instances, list_of_new_values, callback=None, precall=None):

@ -119,19 +123,28 @@ def update_in_bulk(instances, list_of_new_values, callback=None, precall=None):
        callback(instance)


def update_in_bulk_with_ids(ids, list_of_new_values, model):
def update_attr_in_bulk_for_ids(values, attr, model):
    """Update a table using a list of ids.

    :params ids: List of ids.
    :params new_values: List of dicts or duples where each dict/duple is the new data corresponding
        to the instance in the same index position as the dict.
    :param model: Model of the ids.
    :params values: Dict of new values where the key is the pk of the element to update.
    :params attr: attr to update
    :params model: Model of the ids.
    """
    tn = get_typename_for_model_class(model)
    for id, new_values in zip(ids, list_of_new_values):
        key = "{0}:{1}".format(tn, id)
        with advisory_lock(key) as acquired_key_lock:
            model.objects.filter(id=id).update(**new_values)
    values = [str((id, order)) for id, order in values.items()]
    sql = """
    UPDATE "{tbl}"
    SET "{attr}"=update_values.column2
    FROM (
        VALUES
        {values}
    ) AS update_values
    WHERE "{tbl}"."id"=update_values.column1;
    """.format(tbl=model._meta.db_table,
               values=', '.join(values),
               attr=attr)

    cursor = connection.cursor()
    cursor.execute(sql)


def to_tsquery(term):
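For context, the rewritten helper collapses all per-row updates into a single SQL statement. A rough usage sketch (the model, attribute name and pk/order pairs below are placeholders, not taken from this changeset):

    # Illustrative only: "SomeModel", "backlog_order" and the ids are made up.
    new_orders = {12: 1, 15: 2, 18: 3}   # {pk: new value for the single attribute}
    update_attr_in_bulk_for_ids(new_orders, "backlog_order", SomeModel)
    # Executes one UPDATE ... FROM (VALUES (12, 1), (15, 2), (18, 3)) ... query
    # instead of issuing one UPDATE per row.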
@ -25,3 +25,7 @@ def dict_sum(*args):
        assert isinstance(arg, dict)
        result += collections.Counter(arg)
    return result


def into_namedtuple(dictionary):
    return collections.namedtuple('GenericDict', dictionary.keys())(**dictionary)
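A one-line sketch of the new helper (the dict is illustrative):

    point = into_namedtuple({"x": 1, "y": 2})
    point.x  # 1
    point.y  # 2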
@ -0,0 +1,23 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import time


def timestamp_ms():
    return int(time.time() * 1000)
@ -34,6 +34,7 @@ from taiga.base import exceptions as exc
from taiga.base import response
from taiga.base.api.mixins import CreateModelMixin
from taiga.base.api.viewsets import GenericViewSet
from taiga.projects import utils as project_utils
from taiga.projects.models import Project, Membership
from taiga.projects.issues.models import Issue
from taiga.projects.tasks.models import Task

@ -43,11 +44,11 @@ from taiga.users import services as users_services
from . import exceptions as err
from . import mixins
from . import permissions
from . import validators
from . import serializers
from . import services
from . import tasks
from . import throttling
from .renderers import ExportRenderer

from taiga.base.api.utils import get_object_or_404

@ -75,13 +76,11 @@ class ProjectExporterViewSet(mixins.ImportThrottlingPolicyMixin, GenericViewSet)

        if dump_format == "gzip":
            path = "exports/{}/{}-{}.json.gz".format(project.pk, project.slug, uuid.uuid4().hex)
            storage_path = default_storage.path(path)
            with default_storage.open(storage_path, mode="wb") as outfile:
            with default_storage.open(path, mode="wb") as outfile:
                services.render_project(project, gzip.GzipFile(fileobj=outfile))
        else:
            path = "exports/{}/{}-{}.json".format(project.pk, project.slug, uuid.uuid4().hex)
            storage_path = default_storage.path(path)
            with default_storage.open(storage_path, mode="wb") as outfile:
            with default_storage.open(path, mode="wb") as outfile:
                services.render_project(project, outfile)

        response_data = {

@ -103,9 +102,8 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi

        # Validate if the project can be imported
        is_private = data.get('is_private', False)
        total_memberships = len([m for m in data.get("memberships", [])
                                 if m.get("email", None) != data["owner"]])
        total_memberships = total_memberships + 1  # 1 is the owner
        total_memberships = len([m for m in data.get("memberships", []) if m.get("email", None) != data["owner"]])
        total_memberships = total_memberships + 1  # 1 is the owner
        (enough_slots, error_message) = users_services.has_available_slot_for_import_new_project(
            self.request.user,
            is_private,

@ -148,31 +146,31 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        # Create project values choicess
        if "points" in data:
            services.store.store_project_attributes_values(project_serialized.object, data,
                                                            "points", serializers.PointsExportSerializer)
                                                            "points", validators.PointsExportValidator)
        if "issue_types" in data:
            services.store.store_project_attributes_values(project_serialized.object, data,
                                                            "issue_types",
                                                            serializers.IssueTypeExportSerializer)
                                                            validators.IssueTypeExportValidator)
        if "issue_statuses" in data:
            services.store.store_project_attributes_values(project_serialized.object, data,
                                                            "issue_statuses",
                                                            serializers.IssueStatusExportSerializer,)
                                                            validators.IssueStatusExportValidator,)
        if "us_statuses" in data:
            services.store.store_project_attributes_values(project_serialized.object, data,
                                                            "us_statuses",
                                                            serializers.UserStoryStatusExportSerializer,)
                                                            validators.UserStoryStatusExportValidator,)
        if "task_statuses" in data:
            services.store.store_project_attributes_values(project_serialized.object, data,
                                                            "task_statuses",
                                                            serializers.TaskStatusExportSerializer)
                                                            validators.TaskStatusExportValidator)
        if "priorities" in data:
            services.store.store_project_attributes_values(project_serialized.object, data,
                                                            "priorities",
                                                            serializers.PriorityExportSerializer)
                                                            validators.PriorityExportValidator)
        if "severities" in data:
            services.store.store_project_attributes_values(project_serialized.object, data,
                                                            "severities",
                                                            serializers.SeverityExportSerializer)
                                                            validators.SeverityExportValidator)

        if ("points" in data or "issues_types" in data or
                "issues_statuses" in data or "us_statuses" in data or

@ -184,17 +182,17 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        if "userstorycustomattributes" in data:
            services.store.store_custom_attributes(project_serialized.object, data,
                                                   "userstorycustomattributes",
                                                   serializers.UserStoryCustomAttributeExportSerializer)
                                                   validators.UserStoryCustomAttributeExportValidator)

        if "taskcustomattributes" in data:
            services.store.store_custom_attributes(project_serialized.object, data,
                                                   "taskcustomattributes",
                                                   serializers.TaskCustomAttributeExportSerializer)
                                                   validators.TaskCustomAttributeExportValidator)

        if "issuecustomattributes" in data:
            services.store.store_custom_attributes(project_serialized.object, data,
                                                   "issuecustomattributes",
                                                   serializers.IssueCustomAttributeExportSerializer)
                                                   validators.IssueCustomAttributeExportValidator)

        # Is there any error?
        errors = services.store.get_errors()

@ -202,7 +200,7 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
            raise exc.BadRequest(errors)

        # Importer process is OK
        response_data = project_serialized.data
        response_data = serializers.ProjectExportSerializer(project_serialized.object).data
        response_data['id'] = project_serialized.object.id
        headers = self.get_success_headers(response_data)
        return response.Created(response_data, headers=headers)

@ -219,8 +217,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        if errors:
            raise exc.BadRequest(errors)

        headers = self.get_success_headers(milestone.data)
        return response.Created(milestone.data, headers=headers)
        data = serializers.MilestoneExportSerializer(milestone.object).data
        headers = self.get_success_headers(data)
        return response.Created(data, headers=headers)

    @detail_route(methods=['post'])
    @method_decorator(atomic)

@ -234,8 +233,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        if errors:
            raise exc.BadRequest(errors)

        headers = self.get_success_headers(us.data)
        return response.Created(us.data, headers=headers)
        data = serializers.UserStoryExportSerializer(us.object).data
        headers = self.get_success_headers(data)
        return response.Created(data, headers=headers)

    @detail_route(methods=['post'])
    @method_decorator(atomic)

@ -252,8 +252,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        if errors:
            raise exc.BadRequest(errors)

        headers = self.get_success_headers(task.data)
        return response.Created(task.data, headers=headers)
        data = serializers.TaskExportSerializer(task.object).data
        headers = self.get_success_headers(data)
        return response.Created(data, headers=headers)

    @detail_route(methods=['post'])
    @method_decorator(atomic)

@ -270,8 +271,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        if errors:
            raise exc.BadRequest(errors)

        headers = self.get_success_headers(issue.data)
        return response.Created(issue.data, headers=headers)
        data = serializers.IssueExportSerializer(issue.object).data
        headers = self.get_success_headers(data)
        return response.Created(data, headers=headers)

    @detail_route(methods=['post'])
    @method_decorator(atomic)

@ -285,8 +287,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        if errors:
            raise exc.BadRequest(errors)

        headers = self.get_success_headers(wiki_page.data)
        return response.Created(wiki_page.data, headers=headers)
        data = serializers.WikiPageExportSerializer(wiki_page.object).data
        headers = self.get_success_headers(data)
        return response.Created(data, headers=headers)

    @detail_route(methods=['post'])
    @method_decorator(atomic)

@ -300,8 +303,9 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
        if errors:
            raise exc.BadRequest(errors)

        headers = self.get_success_headers(wiki_link.data)
        return response.Created(wiki_link.data, headers=headers)
        data = serializers.WikiLinkExportSerializer(wiki_link.object).data
        headers = self.get_success_headers(data)
        return response.Created(data, headers=headers)

    @list_route(methods=["POST"])
    @method_decorator(atomic)

@ -366,5 +370,7 @@ class ProjectImporterViewSet(mixins.ImportThrottlingPolicyMixin, CreateModelMixi
            return response.BadRequest({"error": e.message, "details": e.errors})
        else:
            # On Success
            response_data = ProjectSerializer(project).data
            project_from_qs = project_utils.attach_extra_info(Project.objects.all()).get(id=project.id)
            response_data = ProjectSerializer(project_from_qs).data

            return response.Created(response_data)
@ -22,7 +22,7 @@ from django.conf import settings

from taiga.projects.models import Project
from taiga.users.models import User
from taiga.permissions.service import is_project_admin
from taiga.permissions.services import is_project_admin
from taiga.export_import import tasks
@ -50,24 +50,27 @@ class Command(BaseCommand):

        data = json.loads(open(dump_file_path, 'r').read())
        try:
            with transaction.atomic():
        if overwrite:
            receivers_back = signals.post_delete.receivers
            signals.post_delete.receivers = []
            try:
                proj = Project.objects.get(slug=data.get("slug", "not a slug"))
                proj.tasks.all().delete()
                proj.user_stories.all().delete()
                proj.issues.all().delete()
                proj.memberships.all().delete()
                proj.roles.all().delete()
                proj.delete()
            except Project.DoesNotExist:
                pass
            signals.post_delete.receivers = receivers_back
                if overwrite:
                    receivers_back = signals.post_delete.receivers
                    signals.post_delete.receivers = []
                    try:
                        proj = Project.objects.get(slug=data.get("slug", "not a slug"))
                        proj.tasks.all().delete()
                        proj.user_stories.all().delete()
                        proj.issues.all().delete()
                        proj.memberships.all().delete()
                        proj.roles.all().delete()
                        proj.delete()
                    except Project.DoesNotExist:
                        pass
                    signals.post_delete.receivers = receivers_back
                else:
                    slug = data.get('slug', None)
                    if slug is not None and Project.objects.filter(slug=slug).exists():
                        del data['slug']

        user = User.objects.get(email=owner_email)
        services.store_project_from_dict(data, user)
                user = User.objects.get(email=owner_email)
                services.store_project_from_dict(data, user)
        except err.TaigaImportError as e:
            if e.project:
                e.project.delete_related_content()
@ -23,7 +23,7 @@ _cache_user_by_email = {}
_custom_tasks_attributes_cache = {}
_custom_issues_attributes_cache = {}
_custom_userstories_attributes_cache = {}

_custom_epics_attributes_cache = {}

def cached_get_user_by_pk(pk):
    if pk not in _cache_user_by_pk:
@ -21,24 +21,15 @@ import os
import copy
from collections import OrderedDict

from django.core.files.base import ContentFile
from django.core.exceptions import ObjectDoesNotExist
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _
from django.contrib.contenttypes.models import ContentType

from taiga.base.api import serializers
from taiga.base.fields import JsonField
from taiga.mdrender.service import render as mdrender
from taiga.base.fields import Field
from taiga.users import models as users_models

from .cache import cached_get_user_by_email, cached_get_user_by_pk
from .cache import cached_get_user_by_pk


class FileField(serializers.WritableField):
    read_only = False

    def to_native(self, obj):
class FileField(Field):
    def to_value(self, obj):
        if not obj:
            return None

@ -49,202 +40,74 @@ class FileField(serializers.WritableField):
            ("name", os.path.basename(obj.name)),
        ])

    def from_native(self, data):
        if not data:
            return None

        decoded_data = b''
        # The original file was encoded by chunks but we don't really know its
        # length or if it was multiple of 3 so we must iterate over all those chunks
        # decoding them one by one
        for decoding_chunk in data['data'].split("="):
            # When encoding to base64 3 bytes are transformed into 4 bytes and
            # the extra space of the block is filled with =
            # We must ensure that the decoding chunk has a length multiple of 4 so
            # we restore the stripped '='s adding appending them until the chunk has
            # a length multiple of 4
            decoding_chunk += "=" * (-len(decoding_chunk) % 4)
            decoded_data += base64.b64decode(decoding_chunk+"=")

        return ContentFile(decoded_data, name=data['name'])

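The padding trick described in the removed from_native comments above can be seen in isolation (a standalone sketch, not part of the diff):

    import base64

    # A chunk loses its trailing '=' padding when the payload is split on "=",
    # so it is padded back to a multiple of 4 before decoding.
    chunk = "aGVsbG8"                    # "hello" base64-encoded, padding stripped
    chunk += "=" * (-len(chunk) % 4)     # restore padding -> "aGVsbG8="
    base64.b64decode(chunk)              # b'hello'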
class ContentTypeField(serializers.RelatedField):
    read_only = False

    def to_native(self, obj):
class ContentTypeField(Field):
    def to_value(self, obj):
        if obj:
            return [obj.app_label, obj.model]
        return None

    def from_native(self, data):
        try:
            return ContentType.objects.get_by_natural_key(*data)
        except Exception:
            return None


class RelatedNoneSafeField(serializers.RelatedField):
    def field_from_native(self, data, files, field_name, into):
        if self.read_only:
            return

        try:
            if self.many:
                try:
                    # Form data
                    value = data.getlist(field_name)
                    if value == [''] or value == []:
                        raise KeyError
                except AttributeError:
                    # Non-form data
                    value = data[field_name]
            else:
                value = data[field_name]
        except KeyError:
            if self.partial:
                return
            value = self.get_default_value()

        key = self.source or field_name
        if value in self.null_values:
            if self.required:
                raise ValidationError(self.error_messages['required'])
            into[key] = None
        elif self.many:
            into[key] = [self.from_native(item) for item in value if self.from_native(item) is not None]
        else:
            into[key] = self.from_native(value)


class UserRelatedField(RelatedNoneSafeField):
    read_only = False

    def to_native(self, obj):
class UserRelatedField(Field):
    def to_value(self, obj):
        if obj:
            return obj.email
        return None

    def from_native(self, data):
        try:
            return cached_get_user_by_email(data)
        except users_models.User.DoesNotExist:
            return None


class UserPkField(serializers.RelatedField):
    read_only = False

    def to_native(self, obj):
class UserPkField(Field):
    def to_value(self, obj):
        try:
            user = cached_get_user_by_pk(obj)
            return user.email
        except users_models.User.DoesNotExist:
            return None

    def from_native(self, data):
        try:
            user = cached_get_user_by_email(data)
            return user.pk
        except users_models.User.DoesNotExist:
            return None


class CommentField(serializers.WritableField):
    read_only = False

    def field_from_native(self, data, files, field_name, into):
        super().field_from_native(data, files, field_name, into)
        into["comment_html"] = mdrender(self.context['project'], data.get("comment", ""))


class ProjectRelatedField(serializers.RelatedField):
    read_only = False
    null_values = (None, "")

class SlugRelatedField(Field):
    def __init__(self, slug_field, *args, **kwargs):
        self.slug_field = slug_field
        super().__init__(*args, **kwargs)

    def to_native(self, obj):
    def to_value(self, obj):
        if obj:
            return getattr(obj, self.slug_field)
        return None

    def from_native(self, data):
        try:
            kwargs = {self.slug_field: data, "project": self.context['project']}
            return self.queryset.get(**kwargs)
        except ObjectDoesNotExist:
            raise ValidationError(_("{}=\"{}\" not found in this project".format(self.slug_field, data)))


class HistoryUserField(JsonField):
    def to_native(self, obj):
class HistoryUserField(Field):
    def to_value(self, obj):
        if obj is None or obj == {}:
            return []
        try:
            user = cached_get_user_by_pk(obj['pk'])
        except users_models.User.DoesNotExist:
            user = None
        return (UserRelatedField().to_native(user), obj['name'])

    def from_native(self, data):
        if data is None:
            return {}

        if len(data) < 2:
            return {}

        user = UserRelatedField().from_native(data[0])

        if user:
            pk = user.pk
        else:
            pk = None

        return {"pk": pk, "name": data[1]}
        return (UserRelatedField().to_value(user), obj['name'])


class HistoryValuesField(JsonField):
    def to_native(self, obj):
class HistoryValuesField(Field):
    def to_value(self, obj):
        if obj is None:
            return []
        if "users" in obj:
            obj['users'] = list(map(UserPkField().to_native, obj['users']))
            obj['users'] = list(map(UserPkField().to_value, obj['users']))
        return obj

    def from_native(self, data):
        if data is None:
            return []
        if "users" in data:
            data['users'] = list(map(UserPkField().from_native, data['users']))
        return data


class HistoryDiffField(JsonField):
    def to_native(self, obj):
class HistoryDiffField(Field):
    def to_value(self, obj):
        if obj is None:
            return []

        if "assigned_to" in obj:
            obj['assigned_to'] = list(map(UserPkField().to_native, obj['assigned_to']))
            obj['assigned_to'] = list(map(UserPkField().to_value, obj['assigned_to']))

        return obj

    def from_native(self, data):
        if data is None:
            return []

        if "assigned_to" in data:
            data['assigned_to'] = list(map(UserPkField().from_native, data['assigned_to']))
        return data


class TimelineDataField(serializers.WritableField):
    read_only = False

    def to_native(self, data):
class TimelineDataField(Field):
    def to_value(self, data):
        new_data = copy.deepcopy(data)
        try:
            user = cached_get_user_by_pk(new_data["user"]["id"])

@ -253,14 +116,3 @@ class TimelineDataField(serializers.WritableField):
        except Exception:
            pass
        return new_data

    def from_native(self, data):
        new_data = copy.deepcopy(data)
        try:
            user = cached_get_user_by_email(new_data["user"]["email"])
            new_data["user"]["id"] = user.id
            del new_data["user"]["email"]
        except users_models.User.DoesNotExist:
            pass

        return new_data
@ -16,56 +16,62 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from django.contrib.auth import get_user_model
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.contenttypes.models import ContentType

from taiga.base.api import serializers
from taiga.base.fields import Field, MethodField, DateTimeField
from taiga.projects.history import models as history_models
from taiga.projects.attachments import models as attachments_models
from taiga.projects.notifications import services as notifications_services
from taiga.projects.history import services as history_service

from .fields import (UserRelatedField, HistoryUserField, HistoryDiffField,
                     JsonField, HistoryValuesField, CommentField, FileField)
                     HistoryValuesField, FileField)


class HistoryExportSerializer(serializers.ModelSerializer):
class HistoryExportSerializer(serializers.LightSerializer):
    user = HistoryUserField()
    diff = HistoryDiffField(required=False)
    snapshot = JsonField(required=False)
    values = HistoryValuesField(required=False)
    comment = CommentField(required=False)
    delete_comment_date = serializers.DateTimeField(required=False)
    delete_comment_user = HistoryUserField(required=False)

    class Meta:
        model = history_models.HistoryEntry
        exclude = ("id", "comment_html", "key")
    diff = HistoryDiffField()
    snapshot = Field()
    values = HistoryValuesField()
    comment = Field()
    delete_comment_date = DateTimeField()
    delete_comment_user = HistoryUserField()
    comment_versions = Field()
    created_at = DateTimeField()
    edit_comment_date = DateTimeField()
    is_hidden = Field()
    is_snapshot = Field()
    type = Field()


class HistoryExportSerializerMixin(serializers.ModelSerializer):
    history = serializers.SerializerMethodField("get_history")
class HistoryExportSerializerMixin(serializers.LightSerializer):
    history = MethodField("get_history")

    def get_history(self, obj):
        history_qs = history_service.get_history_queryset_by_model_instance(obj,
            types=(history_models.HistoryType.change, history_models.HistoryType.create,))
        history_qs = history_service.get_history_queryset_by_model_instance(
            obj,
            types=(history_models.HistoryType.change, history_models.HistoryType.create,)
        )

        return HistoryExportSerializer(history_qs, many=True).data


class AttachmentExportSerializer(serializers.ModelSerializer):
    owner = UserRelatedField(required=False)
class AttachmentExportSerializer(serializers.LightSerializer):
    owner = UserRelatedField()
    attached_file = FileField()
    modified_date = serializers.DateTimeField(required=False)

    class Meta:
        model = attachments_models.Attachment
        exclude = ('id', 'content_type', 'object_id', 'project')
    created_date = DateTimeField()
    modified_date = DateTimeField()
    description = Field()
    is_deprecated = Field()
    name = Field()
    order = Field()
    sha1 = Field()
    size = Field()


class AttachmentExportSerializerMixin(serializers.ModelSerializer):
    attachments = serializers.SerializerMethodField("get_attachments")
class AttachmentExportSerializerMixin(serializers.LightSerializer):
    attachments = MethodField()

    def get_attachments(self, obj):
        content_type = ContentType.objects.get_for_model(obj.__class__)

@ -74,8 +80,8 @@ class AttachmentExportSerializerMixin(serializers.ModelSerializer):
        return AttachmentExportSerializer(attachments_qs, many=True).data


class CustomAttributesValuesExportSerializerMixin(serializers.ModelSerializer):
    custom_attributes_values = serializers.SerializerMethodField("get_custom_attributes_values")
class CustomAttributesValuesExportSerializerMixin(serializers.LightSerializer):
    custom_attributes_values = MethodField("get_custom_attributes_values")

    def custom_attributes_queryset(self, project):
        raise NotImplementedError()

@ -85,13 +91,13 @@ class CustomAttributesValuesExportSerializerMixin(serializers.ModelSerializer):
            ret = {}
            for attr in custom_attributes:
                value = values.get(str(attr["id"]), None)
                if value is not None:
                if value is not None:
                    ret[attr["name"]] = value

            return ret

        try:
            values = obj.custom_attributes_values.attributes_values
            values = obj.custom_attributes_values.attributes_values
            custom_attributes = self.custom_attributes_queryset(obj.project)

            return _use_name_instead_id_as_key_in_custom_attributes_values(custom_attributes, values)

@ -99,43 +105,8 @@ class CustomAttributesValuesExportSerializerMixin(serializers.ModelSerializer):
            return None


class WatcheableObjectModelSerializerMixin(serializers.ModelSerializer):
    watchers = UserRelatedField(many=True, required=False)
class WatcheableObjectLightSerializerMixin(serializers.LightSerializer):
    watchers = MethodField()

    def __init__(self, *args, **kwargs):
        self._watchers_field = self.base_fields.pop("watchers", None)
        super(WatcheableObjectModelSerializerMixin, self).__init__(*args, **kwargs)

    """
    watchers is not a field from the model so we need to do some magic to make it work like a normal field
    It's supposed to be represented as an email list but internally it's treated like notifications.Watched instances
    """

    def restore_object(self, attrs, instance=None):
        watcher_field = self.fields.pop("watchers", None)
        instance = super(WatcheableObjectModelSerializerMixin, self).restore_object(attrs, instance)
        self._watchers = self.init_data.get("watchers", [])
        return instance

    def save_watchers(self):
        new_watcher_emails = set(self._watchers)
        old_watcher_emails = set(self.object.get_watchers().values_list("email", flat=True))
        adding_watcher_emails = list(new_watcher_emails.difference(old_watcher_emails))
        removing_watcher_emails = list(old_watcher_emails.difference(new_watcher_emails))

        User = get_user_model()
        adding_users = User.objects.filter(email__in=adding_watcher_emails)
        removing_users = User.objects.filter(email__in=removing_watcher_emails)

        for user in adding_users:
            notifications_services.add_watcher(self.object, user)

        for user in removing_users:
            notifications_services.remove_watcher(self.object, user)

        self.object.watchers = [user.email for user in self.object.get_watchers()]

    def to_native(self, obj):
        ret = super(WatcheableObjectModelSerializerMixin, self).to_native(obj)
        ret["watchers"] = [user.email for user in obj.get_watchers()]
        return ret
    def get_watchers(self, obj):
        return [user.email for user in obj.get_watchers()]
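The net effect of the watchers rework above is that exported objects carry watcher emails computed on read instead of a writable pseudo-field. A rough sketch (object and variable names are illustrative):

    # Any export serializer that includes the mixin renders watchers as emails.
    data = MilestoneExportSerializer(milestone).data
    data["watchers"]   # e.g. ["alice@example.com", "bob@example.com"]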
@ -16,235 +16,201 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import copy

from django.core.exceptions import ValidationError
from django.utils.translation import ugettext as _

from taiga.base.api import serializers
from taiga.base.fields import JsonField, PgArrayField
from taiga.base.fields import Field, DateTimeField, MethodField

from taiga.projects import models as projects_models
from taiga.projects.custom_attributes import models as custom_attributes_models
from taiga.projects.userstories import models as userstories_models
from taiga.projects.tasks import models as tasks_models
from taiga.projects.issues import models as issues_models
from taiga.projects.milestones import models as milestones_models
from taiga.projects.wiki import models as wiki_models
from taiga.projects.history import models as history_models
from taiga.projects.attachments import models as attachments_models
from taiga.timeline import models as timeline_models
from taiga.users import models as users_models
from taiga.projects.votes import services as votes_service

from .fields import (FileField, RelatedNoneSafeField, UserRelatedField,
                     UserPkField, CommentField, ProjectRelatedField,
                     HistoryUserField, HistoryValuesField, HistoryDiffField,
                     TimelineDataField, ContentTypeField)
from .fields import (FileField, UserRelatedField, TimelineDataField,
                     ContentTypeField, SlugRelatedField)
from .mixins import (HistoryExportSerializerMixin,
                     AttachmentExportSerializerMixin,
                     CustomAttributesValuesExportSerializerMixin,
                     WatcheableObjectModelSerializerMixin)
                     WatcheableObjectLightSerializerMixin)
from .cache import (_custom_tasks_attributes_cache,
                    _custom_userstories_attributes_cache,
                    _custom_epics_attributes_cache,
                    _custom_issues_attributes_cache)


class PointsExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = projects_models.Points
        exclude = ('id', 'project')
class RelatedExportSerializer(serializers.LightSerializer):
    def to_value(self, value):
        if hasattr(value, 'all'):
            return super().to_value(value.all())
        return super().to_value(value)


class UserStoryStatusExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = projects_models.UserStoryStatus
        exclude = ('id', 'project')
class PointsExportSerializer(RelatedExportSerializer):
    name = Field()
    order = Field()
    value = Field()


class TaskStatusExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = projects_models.TaskStatus
        exclude = ('id', 'project')
class UserStoryStatusExportSerializer(RelatedExportSerializer):
    name = Field()
    slug = Field()
    order = Field()
    is_closed = Field()
    is_archived = Field()
    color = Field()
    wip_limit = Field()


class IssueStatusExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = projects_models.IssueStatus
        exclude = ('id', 'project')
class EpicStatusExportSerializer(RelatedExportSerializer):
    name = Field()
    slug = Field()
    order = Field()
    is_closed = Field()
    color = Field()


class PriorityExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = projects_models.Priority
        exclude = ('id', 'project')
class TaskStatusExportSerializer(RelatedExportSerializer):
    name = Field()
    slug = Field()
    order = Field()
    is_closed = Field()
    color = Field()


class SeverityExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = projects_models.Severity
        exclude = ('id', 'project')
class IssueStatusExportSerializer(RelatedExportSerializer):
    name = Field()
    slug = Field()
    order = Field()
    is_closed = Field()
    color = Field()


class IssueTypeExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = projects_models.IssueType
        exclude = ('id', 'project')
class PriorityExportSerializer(RelatedExportSerializer):
    name = Field()
    order = Field()
    color = Field()


class RoleExportSerializer(serializers.ModelSerializer):
    permissions = PgArrayField(required=False)

    class Meta:
        model = users_models.Role
        exclude = ('id', 'project')
class SeverityExportSerializer(RelatedExportSerializer):
    name = Field()
    order = Field()
    color = Field()


class UserStoryCustomAttributeExportSerializer(serializers.ModelSerializer):
    modified_date = serializers.DateTimeField(required=False)

    class Meta:
        model = custom_attributes_models.UserStoryCustomAttribute
        exclude = ('id', 'project')
class IssueTypeExportSerializer(RelatedExportSerializer):
    name = Field()
    order = Field()
    color = Field()


class TaskCustomAttributeExportSerializer(serializers.ModelSerializer):
    modified_date = serializers.DateTimeField(required=False)

    class Meta:
        model = custom_attributes_models.TaskCustomAttribute
        exclude = ('id', 'project')
class RoleExportSerializer(RelatedExportSerializer):
    name = Field()
    slug = Field()
    order = Field()
    computable = Field()
    permissions = Field()


class IssueCustomAttributeExportSerializer(serializers.ModelSerializer):
    modified_date = serializers.DateTimeField(required=False)

    class Meta:
        model = custom_attributes_models.IssueCustomAttribute
        exclude = ('id', 'project')
class EpicCustomAttributesExportSerializer(RelatedExportSerializer):
    name = Field()
    description = Field()
    type = Field()
    order = Field()
    created_date = DateTimeField()
    modified_date = DateTimeField()


class BaseCustomAttributesValuesExportSerializer(serializers.ModelSerializer):
    attributes_values = JsonField(source="attributes_values",required=True)
    _custom_attribute_model = None
    _container_field = None
class UserStoryCustomAttributeExportSerializer(RelatedExportSerializer):
    name = Field()
    description = Field()
    type = Field()
    order = Field()
    created_date = DateTimeField()
    modified_date = DateTimeField()

    class Meta:
        exclude = ("id",)

    def validate_attributes_values(self, attrs, source):
        # values must be a dict
        data_values = attrs.get("attributes_values", None)
        if self.object:
            data_values = (data_values or self.object.attributes_values)
class TaskCustomAttributeExportSerializer(RelatedExportSerializer):
    name = Field()
    description = Field()
    type = Field()
    order = Field()
    created_date = DateTimeField()
    modified_date = DateTimeField()

        if type(data_values) is not dict:
            raise ValidationError(_("Invalid content. It must be {\"key\": \"value\",...}"))

        # Values keys must be in the container object project
        data_container = attrs.get(self._container_field, None)
        if data_container:
            project_id = data_container.project_id
        elif self.object:
            project_id = getattr(self.object, self._container_field).project_id
        else:
            project_id = None
class IssueCustomAttributeExportSerializer(RelatedExportSerializer):
    name = Field()
    description = Field()
    type = Field()
    order = Field()
    created_date = DateTimeField()
    modified_date = DateTimeField()

        values_ids = list(data_values.keys())
        qs = self._custom_attribute_model.objects.filter(project=project_id,
                                                         id__in=values_ids)
        if qs.count() != len(values_ids):
            raise ValidationError(_("It contain invalid custom fields."))

        return attrs
class BaseCustomAttributesValuesExportSerializer(RelatedExportSerializer):
    attributes_values = Field(required=True)


class UserStoryCustomAttributesValuesExportSerializer(BaseCustomAttributesValuesExportSerializer):
    _custom_attribute_model = custom_attributes_models.UserStoryCustomAttribute
    _container_model = "userstories.UserStory"
    _container_field = "user_story"

    class Meta(BaseCustomAttributesValuesExportSerializer.Meta):
        model = custom_attributes_models.UserStoryCustomAttributesValues
    user_story = Field(attr="user_story.id")


class TaskCustomAttributesValuesExportSerializer(BaseCustomAttributesValuesExportSerializer):
    _custom_attribute_model = custom_attributes_models.TaskCustomAttribute
    _container_field = "task"

    class Meta(BaseCustomAttributesValuesExportSerializer.Meta):
        model = custom_attributes_models.TaskCustomAttributesValues
    task = Field(attr="task.id")


class IssueCustomAttributesValuesExportSerializer(BaseCustomAttributesValuesExportSerializer):
    _custom_attribute_model = custom_attributes_models.IssueCustomAttribute
    _container_field = "issue"

    class Meta(BaseCustomAttributesValuesExportSerializer.Meta):
        model = custom_attributes_models.IssueCustomAttributesValues
    issue = Field(attr="issue.id")


class MembershipExportSerializer(serializers.ModelSerializer):
    user = UserRelatedField(required=False)
    role = ProjectRelatedField(slug_field="name")
    invited_by = UserRelatedField(required=False)

    class Meta:
        model = projects_models.Membership
        exclude = ('id', 'project', 'token')

    def full_clean(self, instance):
        return instance
class MembershipExportSerializer(RelatedExportSerializer):
    user = UserRelatedField()
    role = SlugRelatedField(slug_field="name")
    invited_by = UserRelatedField()
    is_admin = Field()
    email = Field()
    created_at = DateTimeField()
    invitation_extra_text = Field()
    user_order = Field()


class RolePointsExportSerializer(serializers.ModelSerializer):
    role = ProjectRelatedField(slug_field="name")
    points = ProjectRelatedField(slug_field="name")

    class Meta:
        model = userstories_models.RolePoints
        exclude = ('id', 'user_story')
class RolePointsExportSerializer(RelatedExportSerializer):
    role = SlugRelatedField(slug_field="name")
    points = SlugRelatedField(slug_field="name")


class MilestoneExportSerializer(WatcheableObjectModelSerializerMixin):
    owner = UserRelatedField(required=False)
    modified_date = serializers.DateTimeField(required=False)
    estimated_start = serializers.DateField(required=False)
    estimated_finish = serializers.DateField(required=False)

    def __init__(self, *args, **kwargs):
        project = kwargs.pop('project', None)
        super(MilestoneExportSerializer, self).__init__(*args, **kwargs)
        if project:
            self.project = project

    def validate_name(self, attrs, source):
        """
        Check the milestone name is not duplicated in the project
        """
        name = attrs[source]
        qs = self.project.milestones.filter(name=name)
        if qs.exists():
            raise serializers.ValidationError(_("Name duplicated for the project"))

        return attrs

    class Meta:
        model = milestones_models.Milestone
        exclude = ('id', 'project')
class MilestoneExportSerializer(WatcheableObjectLightSerializerMixin, RelatedExportSerializer):
    name = Field()
    owner = UserRelatedField()
    created_date = DateTimeField()
    modified_date = DateTimeField()
    estimated_start = Field()
    estimated_finish = Field()
    slug = Field()
    closed = Field()
    disponibility = Field()
    order = Field()


class TaskExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin,
                           AttachmentExportSerializerMixin, WatcheableObjectModelSerializerMixin):
    owner = UserRelatedField(required=False)
    status = ProjectRelatedField(slug_field="name")
    user_story = ProjectRelatedField(slug_field="ref", required=False)
    milestone = ProjectRelatedField(slug_field="name", required=False)
    assigned_to = UserRelatedField(required=False)
    modified_date = serializers.DateTimeField(required=False)

    class Meta:
        model = tasks_models.Task
        exclude = ('id', 'project')
class TaskExportSerializer(CustomAttributesValuesExportSerializerMixin,
                           HistoryExportSerializerMixin,
                           AttachmentExportSerializerMixin,
                           WatcheableObjectLightSerializerMixin,
                           RelatedExportSerializer):
    owner = UserRelatedField()
    status = SlugRelatedField(slug_field="name")
    user_story = SlugRelatedField(slug_field="ref")
    milestone = SlugRelatedField(slug_field="name")
    assigned_to = UserRelatedField()
    modified_date = DateTimeField()
    created_date = DateTimeField()
    finished_date = DateTimeField()
    ref = Field()
    subject = Field()
    us_order = Field()
    taskboard_order = Field()
    description = Field()
    is_iocaine = Field()
    external_reference = Field()
    version = Field()
    blocked_note = Field()
    is_blocked = Field()
    tags = Field()

    def custom_attributes_queryset(self, project):
        if project.id not in _custom_tasks_attributes_cache:
@ -252,41 +218,108 @@ class TaskExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryE
        return _custom_tasks_attributes_cache[project.id]


class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin,
                                AttachmentExportSerializerMixin, WatcheableObjectModelSerializerMixin):
    role_points = RolePointsExportSerializer(many=True, required=False)
    owner = UserRelatedField(required=False)
    assigned_to = UserRelatedField(required=False)
    status = ProjectRelatedField(slug_field="name")
    milestone = ProjectRelatedField(slug_field="name", required=False)
    modified_date = serializers.DateTimeField(required=False)
    generated_from_issue = ProjectRelatedField(slug_field="ref", required=False)

    class Meta:
        model = userstories_models.UserStory
        exclude = ('id', 'project', 'points', 'tasks')
class UserStoryExportSerializer(CustomAttributesValuesExportSerializerMixin,
                                HistoryExportSerializerMixin,
                                AttachmentExportSerializerMixin,
                                WatcheableObjectLightSerializerMixin,
                                RelatedExportSerializer):
    role_points = RolePointsExportSerializer(many=True)
    owner = UserRelatedField()
    assigned_to = UserRelatedField()
    status = SlugRelatedField(slug_field="name")
    milestone = SlugRelatedField(slug_field="name")
    modified_date = DateTimeField()
    created_date = DateTimeField()
    finish_date = DateTimeField()
    generated_from_issue = SlugRelatedField(slug_field="ref")
    ref = Field()
    is_closed = Field()
    backlog_order = Field()
    sprint_order = Field()
    kanban_order = Field()
    subject = Field()
    description = Field()
    client_requirement = Field()
    team_requirement = Field()
    external_reference = Field()
    tribe_gig = Field()
    version = Field()
    blocked_note = Field()
    is_blocked = Field()
    tags = Field()

    def custom_attributes_queryset(self, project):
        if project.id not in _custom_userstories_attributes_cache:
            _custom_userstories_attributes_cache[project.id] = list(project.userstorycustomattributes.all().values('id', 'name'))
            _custom_userstories_attributes_cache[project.id] = list(
                project.userstorycustomattributes.all().values('id', 'name')
            )
        return _custom_userstories_attributes_cache[project.id]


class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin, HistoryExportSerializerMixin,
                            AttachmentExportSerializerMixin, WatcheableObjectModelSerializerMixin):
    owner = UserRelatedField(required=False)
    status = ProjectRelatedField(slug_field="name")
    assigned_to = UserRelatedField(required=False)
    priority = ProjectRelatedField(slug_field="name")
    severity = ProjectRelatedField(slug_field="name")
    type = ProjectRelatedField(slug_field="name")
    milestone = ProjectRelatedField(slug_field="name", required=False)
    votes = serializers.SerializerMethodField("get_votes")
    modified_date = serializers.DateTimeField(required=False)
class EpicRelatedUserStoryExportSerializer(RelatedExportSerializer):
    user_story = SlugRelatedField(slug_field="ref")
    order = Field()

    class Meta:
        model = issues_models.Issue
        exclude = ('id', 'project')

class EpicExportSerializer(CustomAttributesValuesExportSerializerMixin,
                           HistoryExportSerializerMixin,
                           AttachmentExportSerializerMixin,
                           WatcheableObjectLightSerializerMixin,
                           RelatedExportSerializer):
    ref = Field()
    owner = UserRelatedField()
    status = SlugRelatedField(slug_field="name")
    epics_order = Field()
    created_date = DateTimeField()
    modified_date = DateTimeField()
    subject = Field()
    description = Field()
    color = Field()
    assigned_to = UserRelatedField()
    client_requirement = Field()
    team_requirement = Field()
    version = Field()
    blocked_note = Field()
    is_blocked = Field()
    tags = Field()
    related_user_stories = MethodField()

    def get_related_user_stories(self, obj):
        return EpicRelatedUserStoryExportSerializer(obj.relateduserstory_set.all(), many=True).data

    def custom_attributes_queryset(self, project):
        if project.id not in _custom_epics_attributes_cache:
            _custom_epics_attributes_cache[project.id] = list(
                project.userstorycustomattributes.all().values('id', 'name')
            )
        return _custom_epics_attributes_cache[project.id]


class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin,
                            HistoryExportSerializerMixin,
                            AttachmentExportSerializerMixin,
                            WatcheableObjectLightSerializerMixin,
                            RelatedExportSerializer):
    owner = UserRelatedField()
    status = SlugRelatedField(slug_field="name")
    assigned_to = UserRelatedField()
    priority = SlugRelatedField(slug_field="name")
    severity = SlugRelatedField(slug_field="name")
    type = SlugRelatedField(slug_field="name")
    milestone = SlugRelatedField(slug_field="name")
    votes = MethodField("get_votes")
    modified_date = DateTimeField()
    created_date = DateTimeField()
    finished_date = DateTimeField()

    ref = Field()
    subject = Field()
    description = Field()
    external_reference = Field()
    version = Field()
    blocked_note = Field()
    is_blocked = Field()
    tags = Field()

    def get_votes(self, obj):
        return [x.email for x in votes_service.get_voters(obj)]
@ -297,65 +330,99 @@ class IssueExportSerializer(CustomAttributesValuesExportSerializerMixin, History
        return _custom_issues_attributes_cache[project.id]


class WikiPageExportSerializer(HistoryExportSerializerMixin, AttachmentExportSerializerMixin,
                               WatcheableObjectModelSerializerMixin):
    owner = UserRelatedField(required=False)
    last_modifier = UserRelatedField(required=False)
    modified_date = serializers.DateTimeField(required=False)

    class Meta:
        model = wiki_models.WikiPage
        exclude = ('id', 'project')
class WikiPageExportSerializer(HistoryExportSerializerMixin,
                               AttachmentExportSerializerMixin,
                               WatcheableObjectLightSerializerMixin,
                               RelatedExportSerializer):
    slug = Field()
    owner = UserRelatedField()
    last_modifier = UserRelatedField()
    modified_date = DateTimeField()
    created_date = DateTimeField()
    content = Field()
    version = Field()


class WikiLinkExportSerializer(serializers.ModelSerializer):
    class Meta:
        model = wiki_models.WikiLink
        exclude = ('id', 'project')
class WikiLinkExportSerializer(RelatedExportSerializer):
    title = Field()
    href = Field()
    order = Field()


class TimelineExportSerializer(serializers.ModelSerializer):
class TimelineExportSerializer(RelatedExportSerializer):
    data = TimelineDataField()
    data_content_type = ContentTypeField()
    class Meta:
        model = timeline_models.Timeline
        exclude = ('id', 'project', 'namespace', 'object_id', 'content_type')
    event_type = Field()
    created = DateTimeField()


class ProjectExportSerializer(WatcheableObjectModelSerializerMixin):
    logo = FileField(required=False)
    anon_permissions = PgArrayField(required=False)
    public_permissions = PgArrayField(required=False)
    modified_date = serializers.DateTimeField(required=False)
    roles = RoleExportSerializer(many=True, required=False)
    owner = UserRelatedField(required=False)
    memberships = MembershipExportSerializer(many=True, required=False)
    points = PointsExportSerializer(many=True, required=False)
    us_statuses = UserStoryStatusExportSerializer(many=True, required=False)
    task_statuses = TaskStatusExportSerializer(many=True, required=False)
    issue_types = IssueTypeExportSerializer(many=True, required=False)
    issue_statuses = IssueStatusExportSerializer(many=True, required=False)
    priorities = PriorityExportSerializer(many=True, required=False)
    severities = SeverityExportSerializer(many=True, required=False)
    tags_colors = JsonField(required=False)
    default_points = serializers.SlugRelatedField(slug_field="name", required=False)
    default_us_status = serializers.SlugRelatedField(slug_field="name", required=False)
    default_task_status = serializers.SlugRelatedField(slug_field="name", required=False)
    default_priority = serializers.SlugRelatedField(slug_field="name", required=False)
    default_severity = serializers.SlugRelatedField(slug_field="name", required=False)
    default_issue_status = serializers.SlugRelatedField(slug_field="name", required=False)
    default_issue_type = serializers.SlugRelatedField(slug_field="name", required=False)
    userstorycustomattributes = UserStoryCustomAttributeExportSerializer(many=True, required=False)
    taskcustomattributes = TaskCustomAttributeExportSerializer(many=True, required=False)
    issuecustomattributes = IssueCustomAttributeExportSerializer(many=True, required=False)
    user_stories = UserStoryExportSerializer(many=True, required=False)
    tasks = TaskExportSerializer(many=True, required=False)
    milestones = MilestoneExportSerializer(many=True, required=False)
    issues = IssueExportSerializer(many=True, required=False)
    wiki_links = WikiLinkExportSerializer(many=True, required=False)
    wiki_pages = WikiPageExportSerializer(many=True, required=False)

    class Meta:
        model = projects_models.Project
        exclude = ('id', 'creation_template', 'members')
class ProjectExportSerializer(WatcheableObjectLightSerializerMixin):
    name = Field()
    slug = Field()
    description = Field()
    created_date = DateTimeField()
    logo = FileField()
    total_milestones = Field()
    total_story_points = Field()
    is_epics_activated = Field()
    is_backlog_activated = Field()
    is_kanban_activated = Field()
    is_wiki_activated = Field()
    is_issues_activated = Field()
    videoconferences = Field()
    videoconferences_extra_data = Field()
    creation_template = SlugRelatedField(slug_field="slug")
    is_private = Field()
    is_featured = Field()
    is_looking_for_people = Field()
    looking_for_people_note = Field()
    epics_csv_uuid = Field()
    userstories_csv_uuid = Field()
    tasks_csv_uuid = Field()
    issues_csv_uuid = Field()
    transfer_token = Field()
    blocked_code = Field()
    totals_updated_datetime = DateTimeField()
    total_fans = Field()
    total_fans_last_week = Field()
    total_fans_last_month = Field()
    total_fans_last_year = Field()
    total_activity = Field()
    total_activity_last_week = Field()
    total_activity_last_month = Field()
    total_activity_last_year = Field()
    anon_permissions = Field()
    public_permissions = Field()
    modified_date = DateTimeField()
    roles = RoleExportSerializer(many=True)
    owner = UserRelatedField()
    memberships = MembershipExportSerializer(many=True)
    points = PointsExportSerializer(many=True)
    epic_statuses = EpicStatusExportSerializer(many=True)
    us_statuses = UserStoryStatusExportSerializer(many=True)
    task_statuses = TaskStatusExportSerializer(many=True)
    issue_types = IssueTypeExportSerializer(many=True)
    issue_statuses = IssueStatusExportSerializer(many=True)
    priorities = PriorityExportSerializer(many=True)
    severities = SeverityExportSerializer(many=True)
    tags_colors = Field()
    default_points = SlugRelatedField(slug_field="name")
    default_epic_status = SlugRelatedField(slug_field="name")
    default_us_status = SlugRelatedField(slug_field="name")
    default_task_status = SlugRelatedField(slug_field="name")
    default_priority = SlugRelatedField(slug_field="name")
    default_severity = SlugRelatedField(slug_field="name")
    default_issue_status = SlugRelatedField(slug_field="name")
    default_issue_type = SlugRelatedField(slug_field="name")
    epiccustomattributes = EpicCustomAttributesExportSerializer(many=True)
    userstorycustomattributes = UserStoryCustomAttributeExportSerializer(many=True)
    taskcustomattributes = TaskCustomAttributeExportSerializer(many=True)
    issuecustomattributes = IssueCustomAttributeExportSerializer(many=True)
|
||||
epics = EpicExportSerializer(many=True)
|
||||
user_stories = UserStoryExportSerializer(many=True)
|
||||
tasks = TaskExportSerializer(many=True)
|
||||
milestones = MilestoneExportSerializer(many=True)
|
||||
issues = IssueExportSerializer(many=True)
|
||||
wiki_links = WikiLinkExportSerializer(many=True)
|
||||
wiki_pages = WikiPageExportSerializer(many=True)
|
||||
tags = Field()
|
||||
|
|
|
@ -19,49 +19,48 @@
|
|||
# This makes all code that import services works and
|
||||
# is not the baddest practice ;)
|
||||
|
||||
import base64
|
||||
import gc
|
||||
import os
|
||||
|
||||
from django.core.files.storage import default_storage
|
||||
|
||||
from taiga.base.utils import json
|
||||
from taiga.base.fields import MethodField
|
||||
from taiga.timeline.service import get_project_timeline
|
||||
from taiga.base.api.fields import get_component
|
||||
|
||||
from .. import serializers
|
||||
|
||||
|
||||
def render_project(project, outfile, chunk_size = 8190):
|
||||
def render_project(project, outfile, chunk_size=8190):
|
||||
serializer = serializers.ProjectExportSerializer(project)
|
||||
outfile.write(b'{\n')
|
||||
|
||||
first_field = True
|
||||
for field_name in serializer.fields.keys():
|
||||
for field_name in serializer._field_map.keys():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_field:
|
||||
outfile.write(b",\n")
|
||||
else:
|
||||
first_field = False
|
||||
|
||||
field = serializer.fields.get(field_name)
|
||||
field.initialize(parent=serializer, field_name=field_name)
|
||||
field = serializer._field_map.get(field_name)
|
||||
# field.initialize(parent=serializer, field_name=field_name)
|
||||
|
||||
# These "special" fields have attachments so we use them in a special way
|
||||
if field_name in ["wiki_pages", "user_stories", "tasks", "issues"]:
|
||||
if field_name in ["wiki_pages", "user_stories", "tasks", "issues", "epics"]:
|
||||
value = get_component(project, field_name)
|
||||
if field_name != "wiki_pages":
|
||||
value = value.select_related('owner', 'status', 'milestone', 'project', 'assigned_to', 'custom_attributes_values')
|
||||
value = value.select_related('owner', 'status',
|
||||
'project', 'assigned_to',
|
||||
'custom_attributes_values')
|
||||
|
||||
if field_name in ["user_stories", "tasks", "issues"]:
|
||||
value = value.select_related('milestone')
|
||||
|
||||
if field_name == "issues":
|
||||
value = value.select_related('severity', 'priority', 'type')
|
||||
value = value.prefetch_related('history_entry', 'attachments')
|
||||
|
||||
outfile.write('"{}": [\n'.format(field_name).encode())
|
||||
|
||||
attachments_field = field.fields.pop("attachments", None)
|
||||
if attachments_field:
|
||||
attachments_field.initialize(parent=field, field_name="attachments")
|
||||
|
||||
first_item = True
|
||||
for item in value.iterator():
|
||||
# Avoid writing "," in the last element
|
||||
|
@ -70,47 +69,18 @@ def render_project(project, outfile, chunk_size = 8190):
|
|||
else:
|
||||
first_item = False
|
||||
|
||||
|
||||
dumped_value = json.dumps(field.to_native(item))
|
||||
writing_value = dumped_value[:-1]+ ',\n "attachments": [\n'
|
||||
outfile.write(writing_value.encode())
|
||||
|
||||
first_attachment = True
|
||||
for attachment in item.attachments.iterator():
|
||||
# Avoid writing "," in the last element
|
||||
if not first_attachment:
|
||||
outfile.write(b",\n")
|
||||
else:
|
||||
first_attachment = False
|
||||
|
||||
# Write all the data except the serialized file
|
||||
attachment_serializer = serializers.AttachmentExportSerializer(instance=attachment)
|
||||
attached_file_serializer = attachment_serializer.fields.pop("attached_file")
|
||||
dumped_value = json.dumps(attachment_serializer.data)
|
||||
dumped_value = dumped_value[:-1] + ',\n "attached_file":{\n "data":"'
|
||||
outfile.write(dumped_value.encode())
|
||||
|
||||
# We write the attached_files by chunks so the memory used is not increased
|
||||
attachment_file = attachment.attached_file
|
||||
if default_storage.exists(attachment_file.name):
|
||||
with default_storage.open(attachment_file.name) as f:
|
||||
while True:
|
||||
bin_data = f.read(chunk_size)
|
||||
if not bin_data:
|
||||
break
|
||||
|
||||
b64_data = base64.b64encode(bin_data)
|
||||
outfile.write(b64_data)
|
||||
|
||||
outfile.write('", \n "name":"{}"}}\n}}'.format(
|
||||
os.path.basename(attachment_file.name)).encode())
|
||||
|
||||
outfile.write(b']}')
|
||||
field.many = False
|
||||
dumped_value = json.dumps(field.to_value(item))
|
||||
outfile.write(dumped_value.encode())
|
||||
outfile.flush()
|
||||
gc.collect()
|
||||
outfile.write(b']')
|
||||
else:
|
||||
value = field.field_to_native(project, field_name)
|
||||
if isinstance(field, MethodField):
|
||||
value = field.as_getter(field_name, serializers.ProjectExportSerializer)(serializer, project)
|
||||
else:
|
||||
attr = getattr(project, field_name)
|
||||
value = field.to_value(attr)
|
||||
outfile.write('"{}": {}'.format(field_name, json.dumps(value)).encode())
|
||||
|
||||
# Generate the timeline
|
||||
|
@ -127,4 +97,3 @@ def render_project(project, outfile, chunk_size = 8190):
        outfile.write(dumped_value.encode())

    outfile.write(b']}\n')
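The export writes its JSON document by hand instead of serializing everything in one pass: each top-level field is dumped separately, and attachment files are read and base64-encoded chunk by chunk so memory use stays bounded regardless of file size. A minimal standalone sketch of that chunked-encoding idea (illustrative only, not the project's helper; `out` and `src` are assumed to be open binary file objects):

# Illustrative sketch: stream a file into a JSON string value as base64,
# one bounded chunk at a time, so the whole file is never held in memory.
import base64

def write_file_as_base64(out, src, chunk_size=8190):
    out.write(b'"')
    while True:
        chunk = src.read(chunk_size)
        if not chunk:
            break
        # each chunk is encoded independently, so '=' padding can appear
        # mid-stream; the import side splits and re-pads around those boundaries
        out.write(base64.b64encode(chunk))
    out.write(b'"')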
@ -39,7 +39,7 @@ from taiga.timeline.service import build_project_namespace
|
|||
from taiga.users import services as users_service
|
||||
|
||||
from .. import exceptions as err
|
||||
from .. import serializers
|
||||
from .. import validators
|
||||
|
||||
|
||||
########################################################################
|
||||
|
@ -80,23 +80,29 @@ def store_project(data):
|
|||
excluded_fields = [
|
||||
"default_points", "default_us_status", "default_task_status",
|
||||
"default_priority", "default_severity", "default_issue_status",
|
||||
"default_issue_type", "memberships", "points", "us_statuses",
|
||||
"task_statuses", "issue_statuses", "priorities", "severities",
|
||||
"issue_types", "userstorycustomattributes", "taskcustomattributes",
|
||||
"issuecustomattributes", "roles", "milestones", "wiki_pages",
|
||||
"wiki_links", "notify_policies", "user_stories", "issues", "tasks",
|
||||
"default_issue_type", "default_epic_status",
|
||||
"memberships", "points",
|
||||
"epic_statuses", "us_statuses", "task_statuses", "issue_statuses",
|
||||
"priorities", "severities",
|
||||
"issue_types",
|
||||
"epiccustomattributes", "userstorycustomattributes",
|
||||
"taskcustomattributes", "issuecustomattributes",
|
||||
"roles", "milestones",
|
||||
"wiki_pages", "wiki_links",
|
||||
"notify_policies",
|
||||
"epics", "user_stories", "issues", "tasks",
|
||||
"is_featured"
|
||||
]
|
||||
if key not in excluded_fields:
|
||||
project_data[key] = value
|
||||
|
||||
serialized = serializers.ProjectExportSerializer(data=project_data)
|
||||
if serialized.is_valid():
|
||||
serialized.object._importing = True
|
||||
serialized.object.save()
|
||||
serialized.save_watchers()
|
||||
return serialized
|
||||
add_errors("project", serialized.errors)
|
||||
validator = validators.ProjectExportValidator(data=project_data)
|
||||
if validator.is_valid():
|
||||
validator.object._importing = True
|
||||
validator.object.save()
|
||||
validator.save_watchers()
|
||||
return validator
|
||||
add_errors("project", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -133,54 +139,55 @@ def _store_custom_attributes_values(obj, data_values, obj_field, serializer_clas
|
|||
|
||||
|
||||
def _store_attachment(project, obj, attachment):
|
||||
serialized = serializers.AttachmentExportSerializer(data=attachment)
|
||||
if serialized.is_valid():
|
||||
serialized.object.content_type = ContentType.objects.get_for_model(obj.__class__)
|
||||
serialized.object.object_id = obj.id
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object.size = serialized.object.attached_file.size
|
||||
serialized.object.name = os.path.basename(serialized.object.attached_file.name)
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("attachments", serialized.errors)
|
||||
return serialized
|
||||
validator = validators.AttachmentExportValidator(data=attachment)
|
||||
if validator.is_valid():
|
||||
validator.object.content_type = ContentType.objects.get_for_model(obj.__class__)
|
||||
validator.object.object_id = obj.id
|
||||
validator.object.project = project
|
||||
if validator.object.owner is None:
|
||||
validator.object.owner = validator.object.project.owner
|
||||
validator.object._importing = True
|
||||
validator.object.size = validator.object.attached_file.size
|
||||
validator.object.name = os.path.basename(validator.object.attached_file.name)
|
||||
validator.save()
|
||||
return validator
|
||||
add_errors("attachments", validator.errors)
|
||||
return validator
|
||||
|
||||
|
||||
def _store_history(project, obj, history):
|
||||
serialized = serializers.HistoryExportSerializer(data=history, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.key = make_key_from_model_object(obj)
|
||||
if serialized.object.diff is None:
|
||||
serialized.object.diff = []
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("history", serialized.errors)
|
||||
return serialized
|
||||
validator = validators.HistoryExportValidator(data=history, context={"project": project})
|
||||
if validator.is_valid():
|
||||
validator.object.key = make_key_from_model_object(obj)
|
||||
if validator.object.diff is None:
|
||||
validator.object.diff = []
|
||||
validator.object.project_id = project.id
|
||||
validator.object._importing = True
|
||||
validator.save()
|
||||
return validator
|
||||
add_errors("history", validator.errors)
|
||||
return validator
|
||||
|
||||
|
||||
## ROLES
|
||||
|
||||
def _store_role(project, role):
|
||||
serialized = serializers.RoleExportSerializer(data=role)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("roles", serialized.errors)
|
||||
validator = validators.RoleExportValidator(data=role)
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
validator.object._importing = True
|
||||
validator.save()
|
||||
return validator
|
||||
add_errors("roles", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_roles(project, data):
|
||||
results = []
|
||||
for role in data.get("roles", []):
|
||||
serialized = _store_role(project, role)
|
||||
if serialized:
|
||||
results.append(serialized)
|
||||
validator = _store_role(project, role)
|
||||
if validator:
|
||||
results.append(validator)
|
||||
|
||||
return results
|
||||
|
||||
|
@ -188,17 +195,17 @@ def store_roles(project, data):
## MEMBERSHIPS

def _store_membership(project, membership):
    serialized = serializers.MembershipExportSerializer(data=membership, context={"project": project})
    if serialized.is_valid():
        serialized.object.project = project
        serialized.object._importing = True
        serialized.object.token = str(uuid.uuid1())
        serialized.object.user = find_invited_user(serialized.object.email,
                                                   default=serialized.object.user)
        serialized.save()
        return serialized
    validator = validators.MembershipExportValidator(data=membership, context={"project": project})
    if validator.is_valid():
        validator.object.project = project
        validator.object._importing = True
        validator.object.token = str(uuid.uuid1())
        validator.object.user = find_invited_user(validator.object.email,
                                                  default=validator.object.user)
        validator.save()
        return validator

    add_errors("memberships", serialized.errors)
    add_errors("memberships", validator.errors)
    return None
|
||||
|
||||
|
||||
|
@ -212,13 +219,14 @@ def store_memberships(project, data):
|
|||
## PROJECT ATTRIBUTES
|
||||
|
||||
def _store_project_attribute_value(project, data, field, serializer):
|
||||
serialized = serializer(data=data)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized.object
|
||||
add_errors(field, serialized.errors)
|
||||
validator = serializer(data=data)
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
validator.object._importing = True
|
||||
validator.save()
|
||||
return validator.object
|
||||
|
||||
add_errors(field, validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -238,10 +246,10 @@ def store_default_project_attributes_values(project, data):
|
|||
else:
|
||||
value = related.all().first()
|
||||
setattr(project, field, value)
|
||||
|
||||
helper(project, "default_points", project.points, data)
|
||||
helper(project, "default_issue_type", project.issue_types, data)
|
||||
helper(project, "default_issue_status", project.issue_statuses, data)
|
||||
helper(project, "default_epic_status", project.epic_statuses, data)
|
||||
helper(project, "default_us_status", project.us_statuses, data)
|
||||
helper(project, "default_task_status", project.task_statuses, data)
|
||||
helper(project, "default_priority", project.priorities, data)
|
||||
|
@ -253,13 +261,13 @@ def store_default_project_attributes_values(project, data):
|
|||
## CUSTOM ATTRIBUTES
|
||||
|
||||
def _store_custom_attribute(project, data, field, serializer):
|
||||
serialized = serializer(data=data)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized.object
|
||||
add_errors(field, serialized.errors)
|
||||
validator = serializer(data=data)
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
validator.object._importing = True
|
||||
validator.save()
|
||||
return validator.object
|
||||
add_errors(field, validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -273,19 +281,19 @@ def store_custom_attributes(project, data, field, serializer):
|
|||
## MILESTONE
|
||||
|
||||
def store_milestone(project, milestone):
|
||||
serialized = serializers.MilestoneExportSerializer(data=milestone, project=project)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
validator = validators.MilestoneExportValidator(data=milestone, project=project)
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
validator.object._importing = True
|
||||
validator.save()
|
||||
validator.save_watchers()
|
||||
|
||||
for task_without_us in milestone.get("tasks_without_us", []):
|
||||
task_without_us["user_story"] = None
|
||||
store_task(project, task_without_us)
|
||||
return serialized
|
||||
return validator
|
||||
|
||||
add_errors("milestones", serialized.errors)
|
||||
add_errors("milestones", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -300,73 +308,78 @@ def store_milestones(project, data):
|
|||
## USER STORIES
|
||||
|
||||
def _store_role_point(project, us, role_point):
|
||||
serialized = serializers.RolePointsExportSerializer(data=role_point, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
validator = validators.RolePointsExportValidator(data=role_point, context={"project": project})
|
||||
if validator.is_valid():
|
||||
try:
|
||||
existing_role_point = us.role_points.get(role=serialized.object.role)
|
||||
existing_role_point.points = serialized.object.points
|
||||
existing_role_point = us.role_points.get(role=validator.object.role)
|
||||
existing_role_point.points = validator.object.points
|
||||
existing_role_point.save()
|
||||
return existing_role_point
|
||||
|
||||
except RolePoints.DoesNotExist:
|
||||
serialized.object.user_story = us
|
||||
serialized.save()
|
||||
return serialized.object
|
||||
validator.object.user_story = us
|
||||
validator.save()
|
||||
return validator.object
|
||||
|
||||
add_errors("role_points", serialized.errors)
|
||||
add_errors("role_points", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_user_story(project, data):
|
||||
if "status" not in data and project.default_us_status:
|
||||
data["status"] = project.default_us_status.name
|
||||
|
||||
us_data = {key: value for key, value in data.items() if key not in
|
||||
["role_points", "custom_attributes_values"]}
|
||||
serialized = serializers.UserStoryExportSerializer(data=us_data, context={"project": project})
|
||||
["role_points", "custom_attributes_values"]}
|
||||
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object._not_notify = True
|
||||
validator = validators.UserStoryExportValidator(data=us_data, context={"project": project})
|
||||
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
if validator.object.owner is None:
|
||||
validator.object.owner = validator.object.project.owner
|
||||
validator.object._importing = True
|
||||
validator.object._not_notify = True
|
||||
|
||||
if serialized.object.ref:
|
||||
validator.save()
|
||||
validator.save_watchers()
|
||||
|
||||
if validator.object.ref:
|
||||
sequence_name = refs.make_sequence_name(project)
|
||||
if not seq.exists(sequence_name):
|
||||
seq.create(sequence_name)
|
||||
seq.set_max(sequence_name, serialized.object.ref)
|
||||
seq.set_max(sequence_name, validator.object.ref)
|
||||
else:
|
||||
serialized.object.ref, _ = refs.make_reference(serialized.object, project)
|
||||
serialized.object.save()
|
||||
validator.object.ref, _ = refs.make_reference(validator.object, project)
|
||||
validator.object.save()
|
||||
|
||||
for us_attachment in data.get("attachments", []):
|
||||
_store_attachment(project, serialized.object, us_attachment)
|
||||
_store_attachment(project, validator.object, us_attachment)
|
||||
|
||||
for role_point in data.get("role_points", []):
|
||||
_store_role_point(project, serialized.object, role_point)
|
||||
_store_role_point(project, validator.object, role_point)
|
||||
|
||||
history_entries = data.get("history", [])
|
||||
for history in history_entries:
|
||||
_store_history(project, serialized.object, history)
|
||||
_store_history(project, validator.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
take_snapshot(validator.object, user=validator.object.owner)
|
||||
|
||||
custom_attributes_values = data.get("custom_attributes_values", None)
|
||||
if custom_attributes_values:
|
||||
custom_attributes = serialized.object.project.userstorycustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values(
|
||||
custom_attributes, custom_attributes_values)
|
||||
_store_custom_attributes_values(serialized.object, custom_attributes_values,
|
||||
"user_story", serializers.UserStoryCustomAttributesValuesExportSerializer)
|
||||
custom_attributes = validator.object.project.userstorycustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = \
|
||||
_use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes,
|
||||
custom_attributes_values)
|
||||
|
||||
return serialized
|
||||
_store_custom_attributes_values(validator.object, custom_attributes_values,
|
||||
"user_story",
|
||||
validators.UserStoryCustomAttributesValuesExportValidator)
|
||||
|
||||
add_errors("user_stories", serialized.errors)
|
||||
return validator
|
||||
|
||||
add_errors("user_stories", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -378,53 +391,131 @@ def store_user_stories(project, data):
|
|||
return results
|
||||
|
||||
|
||||
## EPICS
|
||||
|
||||
def _store_epic_related_user_story(project, epic, related_user_story):
|
||||
validator = validators.EpicRelatedUserStoryExportValidator(data=related_user_story,
|
||||
context={"project": project})
|
||||
if validator.is_valid():
|
||||
validator.object.epic = epic
|
||||
validator.object.save()
|
||||
return validator.object
|
||||
|
||||
add_errors("epic_related_user_stories", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_epic(project, data):
|
||||
if "status" not in data and project.default_epic_status:
|
||||
data["status"] = project.default_epic_status.name
|
||||
|
||||
validator = validators.EpicExportValidator(data=data, context={"project": project})
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
if validator.object.owner is None:
|
||||
validator.object.owner = validator.object.project.owner
|
||||
validator.object._importing = True
|
||||
validator.object._not_notify = True
|
||||
|
||||
validator.save()
|
||||
validator.save_watchers()
|
||||
|
||||
if validator.object.ref:
|
||||
sequence_name = refs.make_sequence_name(project)
|
||||
if not seq.exists(sequence_name):
|
||||
seq.create(sequence_name)
|
||||
seq.set_max(sequence_name, validator.object.ref)
|
||||
else:
|
||||
validator.object.ref, _ = refs.make_reference(validator.object, project)
|
||||
validator.object.save()
|
||||
|
||||
for epic_attachment in data.get("attachments", []):
|
||||
_store_attachment(project, validator.object, epic_attachment)
|
||||
|
||||
for related_user_story in data.get("related_user_stories", []):
|
||||
_store_epic_related_user_story(project, validator.object, related_user_story)
|
||||
|
||||
history_entries = data.get("history", [])
|
||||
for history in history_entries:
|
||||
_store_history(project, validator.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(validator.object, user=validator.object.owner)
|
||||
|
||||
custom_attributes_values = data.get("custom_attributes_values", None)
|
||||
if custom_attributes_values:
|
||||
custom_attributes = validator.object.project.epiccustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = \
|
||||
_use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes,
|
||||
custom_attributes_values)
|
||||
_store_custom_attributes_values(validator.object, custom_attributes_values,
|
||||
"epic",
|
||||
validators.EpicCustomAttributesValuesExportValidator)
|
||||
|
||||
return validator
|
||||
|
||||
add_errors("epics", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
def store_epics(project, data):
    results = []
    for epic in data.get("epics", []):
        epic = store_epic(project, epic)
        results.append(epic)
    return results
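store_epic follows the same recipe as the other object importers: validate the raw dict, attach it to the project, backfill the owner, keep the project's ref sequence in sync, then replay attachments, related user stories, history entries and custom attribute values. A rough, partial illustration of the dict shape it consumes (all values below are invented; the real payload is whatever the exporter wrote):

# Invented example payload for a single entry of data["epics"]
epic_data = {
    "ref": 42,
    "subject": "Example epic",
    "status": "New",                      # matched against the project's epic statuses by name
    "owner": "alice@example.com",         # users are referenced by email in export files
    "attachments": [],                    # handled by _store_attachment
    "related_user_stories": [],           # handled by _store_epic_related_user_story
    "history": [],                        # replayed by _store_history
    "custom_attributes_values": {},       # names remapped to attribute ids before storing
}
store_epic(project, epic_data)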
## TASKS
|
||||
|
||||
def store_task(project, data):
|
||||
if "status" not in data and project.default_task_status:
|
||||
data["status"] = project.default_task_status.name
|
||||
|
||||
serialized = serializers.TaskExportSerializer(data=data, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object._not_notify = True
|
||||
validator = validators.TaskExportValidator(data=data, context={"project": project})
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
if validator.object.owner is None:
|
||||
validator.object.owner = validator.object.project.owner
|
||||
validator.object._importing = True
|
||||
validator.object._not_notify = True
|
||||
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
validator.save()
|
||||
validator.save_watchers()
|
||||
|
||||
if serialized.object.ref:
|
||||
if validator.object.ref:
|
||||
sequence_name = refs.make_sequence_name(project)
|
||||
if not seq.exists(sequence_name):
|
||||
seq.create(sequence_name)
|
||||
seq.set_max(sequence_name, serialized.object.ref)
|
||||
seq.set_max(sequence_name, validator.object.ref)
|
||||
else:
|
||||
serialized.object.ref, _ = refs.make_reference(serialized.object, project)
|
||||
serialized.object.save()
|
||||
validator.object.ref, _ = refs.make_reference(validator.object, project)
|
||||
validator.object.save()
|
||||
|
||||
for task_attachment in data.get("attachments", []):
|
||||
_store_attachment(project, serialized.object, task_attachment)
|
||||
_store_attachment(project, validator.object, task_attachment)
|
||||
|
||||
history_entries = data.get("history", [])
|
||||
for history in history_entries:
|
||||
_store_history(project, serialized.object, history)
|
||||
_store_history(project, validator.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
take_snapshot(validator.object, user=validator.object.owner)
|
||||
|
||||
custom_attributes_values = data.get("custom_attributes_values", None)
|
||||
if custom_attributes_values:
|
||||
custom_attributes = serialized.object.project.taskcustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values(
|
||||
custom_attributes, custom_attributes_values)
|
||||
_store_custom_attributes_values(serialized.object, custom_attributes_values,
|
||||
"task", serializers.TaskCustomAttributesValuesExportSerializer)
|
||||
custom_attributes = validator.object.project.taskcustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = \
|
||||
_use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes,
|
||||
custom_attributes_values)
|
||||
|
||||
return serialized
|
||||
_store_custom_attributes_values(validator.object, custom_attributes_values,
|
||||
"task",
|
||||
validators.TaskCustomAttributesValuesExportValidator)
|
||||
|
||||
add_errors("tasks", serialized.errors)
|
||||
return validator
|
||||
|
||||
add_errors("tasks", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -439,7 +530,7 @@ def store_tasks(project, data):
|
|||
## ISSUES
|
||||
|
||||
def store_issue(project, data):
|
||||
serialized = serializers.IssueExportSerializer(data=data, context={"project": project})
|
||||
validator = validators.IssueExportValidator(data=data, context={"project": project})
|
||||
|
||||
if "type" not in data and project.default_issue_type:
|
||||
data["type"] = project.default_issue_type.name
|
||||
|
@ -453,46 +544,48 @@ def store_issue(project, data):
|
|||
if "severity" not in data and project.default_severity:
|
||||
data["severity"] = project.default_severity.name
|
||||
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object._not_notify = True
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
if validator.object.owner is None:
|
||||
validator.object.owner = validator.object.project.owner
|
||||
validator.object._importing = True
|
||||
validator.object._not_notify = True
|
||||
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
validator.save()
|
||||
validator.save_watchers()
|
||||
|
||||
if serialized.object.ref:
|
||||
if validator.object.ref:
|
||||
sequence_name = refs.make_sequence_name(project)
|
||||
if not seq.exists(sequence_name):
|
||||
seq.create(sequence_name)
|
||||
seq.set_max(sequence_name, serialized.object.ref)
|
||||
seq.set_max(sequence_name, validator.object.ref)
|
||||
else:
|
||||
serialized.object.ref, _ = refs.make_reference(serialized.object, project)
|
||||
serialized.object.save()
|
||||
validator.object.ref, _ = refs.make_reference(validator.object, project)
|
||||
validator.object.save()
|
||||
|
||||
for attachment in data.get("attachments", []):
|
||||
_store_attachment(project, serialized.object, attachment)
|
||||
_store_attachment(project, validator.object, attachment)
|
||||
|
||||
history_entries = data.get("history", [])
|
||||
for history in history_entries:
|
||||
_store_history(project, serialized.object, history)
|
||||
_store_history(project, validator.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
take_snapshot(validator.object, user=validator.object.owner)
|
||||
|
||||
custom_attributes_values = data.get("custom_attributes_values", None)
|
||||
if custom_attributes_values:
|
||||
custom_attributes = serialized.object.project.issuecustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = _use_id_instead_name_as_key_in_custom_attributes_values(
|
||||
custom_attributes, custom_attributes_values)
|
||||
_store_custom_attributes_values(serialized.object, custom_attributes_values,
|
||||
"issue", serializers.IssueCustomAttributesValuesExportSerializer)
|
||||
custom_attributes = validator.object.project.issuecustomattributes.all().values('id', 'name')
|
||||
custom_attributes_values = \
|
||||
_use_id_instead_name_as_key_in_custom_attributes_values(custom_attributes,
|
||||
custom_attributes_values)
|
||||
_store_custom_attributes_values(validator.object, custom_attributes_values,
|
||||
"issue",
|
||||
validators.IssueCustomAttributesValuesExportValidator)
|
||||
|
||||
return serialized
|
||||
return validator
|
||||
|
||||
add_errors("issues", serialized.errors)
|
||||
add_errors("issues", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -507,29 +600,29 @@ def store_issues(project, data):
|
|||
|
||||
def store_wiki_page(project, wiki_page):
|
||||
wiki_page["slug"] = slugify(unidecode(wiki_page.get("slug", "")))
|
||||
serialized = serializers.WikiPageExportSerializer(data=wiki_page)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
if serialized.object.owner is None:
|
||||
serialized.object.owner = serialized.object.project.owner
|
||||
serialized.object._importing = True
|
||||
serialized.object._not_notify = True
|
||||
serialized.save()
|
||||
serialized.save_watchers()
|
||||
validator = validators.WikiPageExportValidator(data=wiki_page)
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
if validator.object.owner is None:
|
||||
validator.object.owner = validator.object.project.owner
|
||||
validator.object._importing = True
|
||||
validator.object._not_notify = True
|
||||
validator.save()
|
||||
validator.save_watchers()
|
||||
|
||||
for attachment in wiki_page.get("attachments", []):
|
||||
_store_attachment(project, serialized.object, attachment)
|
||||
_store_attachment(project, validator.object, attachment)
|
||||
|
||||
history_entries = wiki_page.get("history", [])
|
||||
for history in history_entries:
|
||||
_store_history(project, serialized.object, history)
|
||||
_store_history(project, validator.object, history)
|
||||
|
||||
if not history_entries:
|
||||
take_snapshot(serialized.object, user=serialized.object.owner)
|
||||
take_snapshot(validator.object, user=validator.object.owner)
|
||||
|
||||
return serialized
|
||||
return validator
|
||||
|
||||
add_errors("wiki_pages", serialized.errors)
|
||||
add_errors("wiki_pages", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -543,14 +636,14 @@ def store_wiki_pages(project, data):
|
|||
## WIKI LINKS
|
||||
|
||||
def store_wiki_link(project, wiki_link):
|
||||
serialized = serializers.WikiLinkExportSerializer(data=wiki_link)
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
validator = validators.WikiLinkExportValidator(data=wiki_link)
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
validator.object._importing = True
|
||||
validator.save()
|
||||
return validator
|
||||
|
||||
add_errors("wiki_links", serialized.errors)
|
||||
add_errors("wiki_links", validator.errors)
|
||||
return None
|
||||
|
||||
|
||||
|
@ -572,17 +665,17 @@ def store_tags_colors(project, data):
|
|||
## TIMELINE
|
||||
|
||||
def _store_timeline_entry(project, timeline):
|
||||
serialized = serializers.TimelineExportSerializer(data=timeline, context={"project": project})
|
||||
if serialized.is_valid():
|
||||
serialized.object.project = project
|
||||
serialized.object.namespace = build_project_namespace(project)
|
||||
serialized.object.object_id = project.id
|
||||
serialized.object.content_type = ContentType.objects.get_for_model(project.__class__)
|
||||
serialized.object._importing = True
|
||||
serialized.save()
|
||||
return serialized
|
||||
add_errors("timeline", serialized.errors)
|
||||
return serialized
|
||||
validator = validators.TimelineExportValidator(data=timeline, context={"project": project})
|
||||
if validator.is_valid():
|
||||
validator.object.project = project
|
||||
validator.object.namespace = build_project_namespace(project)
|
||||
validator.object.object_id = project.id
|
||||
validator.object.content_type = ContentType.objects.get_for_model(project.__class__)
|
||||
validator.object._importing = True
|
||||
validator.save()
|
||||
return validator
|
||||
add_errors("timeline", validator.errors)
|
||||
return validator
|
||||
|
||||
|
||||
def store_timeline_entries(project, data):
|
||||
|
@ -604,8 +697,9 @@ def _validate_if_owner_have_enought_space_to_this_project(owner, data):
|
|||
|
||||
is_private = data.get("is_private", False)
|
||||
total_memberships = len([m for m in data.get("memberships", [])
|
||||
if m.get("email", None) != data["owner"]])
|
||||
total_memberships = total_memberships + 1 # 1 is the owner
|
||||
if m.get("email", None) != data["owner"]])
|
||||
|
||||
total_memberships = total_memberships + 1 # 1 is the owner
|
||||
(enough_slots, error_message) = users_service.has_available_slot_for_import_new_project(
|
||||
owner,
|
||||
is_private,
|
||||
|
@ -617,13 +711,13 @@ def _validate_if_owner_have_enought_space_to_this_project(owner, data):
|
|||
|
||||
def _create_project_object(data):
|
||||
# Create the project
|
||||
project_serialized = store_project(data)
|
||||
project_validator = store_project(data)
|
||||
|
||||
if not project_serialized:
|
||||
if not project_validator:
|
||||
errors = get_errors(clear=True)
|
||||
raise err.TaigaImportError(_("error importing project data"), None, errors=errors)
|
||||
|
||||
return project_serialized.object if project_serialized else None
|
||||
return project_validator.object if project_validator else None
|
||||
|
||||
|
||||
def _create_membership_for_project_owner(project):
|
||||
|
@ -651,16 +745,17 @@ def _populate_project_object(project, data):
|
|||
# Create memberships
|
||||
store_memberships(project, data)
|
||||
_create_membership_for_project_owner(project)
|
||||
check_if_there_is_some_error(_("error importing memberships"), project)
|
||||
check_if_there_is_some_error(_("error importing memberships"), project)
|
||||
|
||||
# Create project attributes values
|
||||
store_project_attributes_values(project, data, "us_statuses", serializers.UserStoryStatusExportSerializer)
|
||||
store_project_attributes_values(project, data, "points", serializers.PointsExportSerializer)
|
||||
store_project_attributes_values(project, data, "task_statuses", serializers.TaskStatusExportSerializer)
|
||||
store_project_attributes_values(project, data, "issue_types", serializers.IssueTypeExportSerializer)
|
||||
store_project_attributes_values(project, data, "issue_statuses", serializers.IssueStatusExportSerializer)
|
||||
store_project_attributes_values(project, data, "priorities", serializers.PriorityExportSerializer)
|
||||
store_project_attributes_values(project, data, "severities", serializers.SeverityExportSerializer)
|
||||
store_project_attributes_values(project, data, "epic_statuses", validators.EpicStatusExportValidator)
|
||||
store_project_attributes_values(project, data, "us_statuses", validators.UserStoryStatusExportValidator)
|
||||
store_project_attributes_values(project, data, "points", validators.PointsExportValidator)
|
||||
store_project_attributes_values(project, data, "task_statuses", validators.TaskStatusExportValidator)
|
||||
store_project_attributes_values(project, data, "issue_types", validators.IssueTypeExportValidator)
|
||||
store_project_attributes_values(project, data, "issue_statuses", validators.IssueStatusExportValidator)
|
||||
store_project_attributes_values(project, data, "priorities", validators.PriorityExportValidator)
|
||||
store_project_attributes_values(project, data, "severities", validators.SeverityExportValidator)
|
||||
check_if_there_is_some_error(_("error importing lists of project attributes"), project)
|
||||
|
||||
# Create default values for project attributes
|
||||
|
@ -668,12 +763,14 @@ def _populate_project_object(project, data):
|
|||
check_if_there_is_some_error(_("error importing default project attributes values"), project)
|
||||
|
||||
# Create custom attributes
|
||||
store_custom_attributes(project, data, "epiccustomattributes",
|
||||
validators.EpicCustomAttributeExportValidator)
|
||||
store_custom_attributes(project, data, "userstorycustomattributes",
|
||||
serializers.UserStoryCustomAttributeExportSerializer)
|
||||
validators.UserStoryCustomAttributeExportValidator)
|
||||
store_custom_attributes(project, data, "taskcustomattributes",
|
||||
serializers.TaskCustomAttributeExportSerializer)
|
||||
validators.TaskCustomAttributeExportValidator)
|
||||
store_custom_attributes(project, data, "issuecustomattributes",
|
||||
serializers.IssueCustomAttributeExportSerializer)
|
||||
validators.IssueCustomAttributeExportValidator)
|
||||
check_if_there_is_some_error(_("error importing custom attributes"), project)
|
||||
|
||||
# Create milestones
|
||||
|
@ -688,6 +785,10 @@ def _populate_project_object(project, data):
    store_user_stories(project, data)
    check_if_there_is_some_error(_("error importing user stories"), project)

    # Create epics
    store_epics(project, data)
    check_if_there_is_some_error(_("error importing epics"), project)

    # Create tasks
    store_tasks(project, data)
    check_if_there_is_some_error(_("error importing tasks"), project)
@ -46,13 +46,11 @@ def dump_project(self, user, project, dump_format):
    try:
        if dump_format == "gzip":
            path = "exports/{}/{}-{}.json.gz".format(project.pk, project.slug, self.request.id)
            storage_path = default_storage.path(path)
            with default_storage.open(storage_path, mode="wb") as outfile:
            with default_storage.open(path, mode="wb") as outfile:
                services.render_project(project, gzip.GzipFile(fileobj=outfile))
        else:
            path = "exports/{}/{}-{}.json".format(project.pk, project.slug, self.request.id)
            storage_path = default_storage.path(path)
            with default_storage.open(storage_path, mode="wb") as outfile:
            with default_storage.open(path, mode="wb") as outfile:
                services.render_project(project, outfile)

        url = default_storage.url(path)
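Dropping the intermediate default_storage.path(path) call is what makes the dump work on storage backends without a local filesystem: Django's Storage.path() is only available when the backend can map names to local paths, while open() and url() belong to the generic storage API that remote backends also implement. A small sketch of the resulting pattern (assuming default_storage may be local or remote; the name below is an example value):

# Sketch: always address files by their storage name, never by a filesystem path.
from django.core.files.storage import default_storage

name = "exports/1/demo-abc123.json"            # storage-relative name (example value)
with default_storage.open(name, mode="wb") as outfile:
    outfile.write(b"{}\n")
download_url = default_storage.url(name)       # served by whatever backend is configured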
@ -0,0 +1,31 @@
from .validators import PointsExportValidator
from .validators import EpicStatusExportValidator
from .validators import UserStoryStatusExportValidator
from .validators import TaskStatusExportValidator
from .validators import IssueStatusExportValidator
from .validators import PriorityExportValidator
from .validators import SeverityExportValidator
from .validators import IssueTypeExportValidator
from .validators import RoleExportValidator
from .validators import EpicCustomAttributeExportValidator
from .validators import UserStoryCustomAttributeExportValidator
from .validators import TaskCustomAttributeExportValidator
from .validators import IssueCustomAttributeExportValidator
from .validators import BaseCustomAttributesValuesExportValidator
from .validators import UserStoryCustomAttributesValuesExportValidator
from .validators import TaskCustomAttributesValuesExportValidator
from .validators import IssueCustomAttributesValuesExportValidator
from .validators import MembershipExportValidator
from .validators import RolePointsExportValidator
from .validators import MilestoneExportValidator
from .validators import TaskExportValidator
from .validators import EpicRelatedUserStoryExportValidator
from .validators import EpicExportValidator
from .validators import UserStoryExportValidator
from .validators import IssueExportValidator
from .validators import WikiPageExportValidator
from .validators import WikiLinkExportValidator
from .validators import TimelineExportValidator
from .validators import ProjectExportValidator
from .mixins import AttachmentExportValidator
from .mixins import HistoryExportValidator
@ -0,0 +1,43 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.users import models as users_models

_cache_user_by_pk = {}
_cache_user_by_email = {}
_custom_tasks_attributes_cache = {}
_custom_issues_attributes_cache = {}
_custom_epics_attributes_cache = {}
_custom_userstories_attributes_cache = {}


def cached_get_user_by_pk(pk):
    if pk not in _cache_user_by_pk:
        try:
            _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk)
        except Exception:
            _cache_user_by_pk[pk] = users_models.User.objects.get(pk=pk)
    return _cache_user_by_pk[pk]


def cached_get_user_by_email(email):
    if email not in _cache_user_by_email:
        try:
            _cache_user_by_email[email] = users_models.User.objects.get(email=email)
        except Exception:
            _cache_user_by_email[email] = users_models.User.objects.get(email=email)
    return _cache_user_by_email[email]
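These module-level dicts simply memoize lookups for the lifetime of the import process, so resolving the same author or watcher email repeatedly costs a single query. Hypothetical usage (the email address is made up):

# Hypothetical usage of the helpers above; "alice@example.com" is invented.
user = cached_get_user_by_email("alice@example.com")    # first call hits the database
again = cached_get_user_by_email("alice@example.com")   # second call is answered from the dict
assert user is again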
@ -0,0 +1,196 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import base64
|
||||
import copy
|
||||
|
||||
from django.core.files.base import ContentFile
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
from taiga.base.api import serializers
|
||||
from taiga.base.exceptions import ValidationError
|
||||
from taiga.base.fields import JsonField
|
||||
from taiga.mdrender.service import render as mdrender
|
||||
from taiga.users import models as users_models
|
||||
|
||||
from .cache import cached_get_user_by_email
|
||||
|
||||
|
||||
class FileField(serializers.WritableField):
    read_only = False

    def from_native(self, data):
        if not data:
            return None

        decoded_data = b''
        # The original file was encoded by chunks but we don't really know its
        # length or if it was multiple of 3 so we must iterate over all those chunks
        # decoding them one by one
        for decoding_chunk in data['data'].split("="):
            # When encoding to base64 3 bytes are transformed into 4 bytes and
            # the extra space of the block is filled with =
            # We must ensure that the decoding chunk has a length multiple of 4 so
            # we restore the stripped '='s by appending them until the chunk has
            # a length multiple of 4
            decoding_chunk += "=" * (-len(decoding_chunk) % 4)
            decoded_data += base64.b64decode(decoding_chunk + "=")

        return ContentFile(decoded_data, name=data['name'])
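A tiny round-trip sketch of the trick used above (illustrative only): because the exporter base64-encodes the attachment chunk by chunk, '=' padding can show up in the middle of the stream, so the importer splits on '=' and re-pads every piece to a multiple of four characters before decoding.

# Illustrative only: decode a value that was base64-encoded in independent chunks.
import base64

encoded = base64.b64encode(b"chunk one!").decode() + base64.b64encode(b"chunk two").decode()
decoded = b""
for piece in encoded.split("="):
    piece += "=" * (-len(piece) % 4)     # restore the padding stripped by the split
    decoded += base64.b64decode(piece)
assert decoded == b"chunk one!chunk two"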
class ContentTypeField(serializers.RelatedField):
|
||||
read_only = False
|
||||
|
||||
def from_native(self, data):
|
||||
try:
|
||||
return ContentType.objects.get_by_natural_key(*data)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
class RelatedNoneSafeField(serializers.RelatedField):
|
||||
def field_from_native(self, data, files, field_name, into):
|
||||
if self.read_only:
|
||||
return
|
||||
|
||||
try:
|
||||
if self.many:
|
||||
try:
|
||||
# Form data
|
||||
value = data.getlist(field_name)
|
||||
if value == [''] or value == []:
|
||||
raise KeyError
|
||||
except AttributeError:
|
||||
# Non-form data
|
||||
value = data[field_name]
|
||||
else:
|
||||
value = data[field_name]
|
||||
except KeyError:
|
||||
if self.partial:
|
||||
return
|
||||
value = self.get_default_value()
|
||||
|
||||
key = self.source or field_name
|
||||
if value in self.null_values:
|
||||
if self.required:
|
||||
raise ValidationError(self.error_messages['required'])
|
||||
into[key] = None
|
||||
elif self.many:
|
||||
into[key] = [self.from_native(item) for item in value if self.from_native(item) is not None]
|
||||
else:
|
||||
into[key] = self.from_native(value)
|
||||
|
||||
|
||||
class UserRelatedField(RelatedNoneSafeField):
|
||||
read_only = False
|
||||
|
||||
def from_native(self, data):
|
||||
try:
|
||||
return cached_get_user_by_email(data)
|
||||
except users_models.User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
class UserPkField(serializers.RelatedField):
|
||||
read_only = False
|
||||
|
||||
def from_native(self, data):
|
||||
try:
|
||||
user = cached_get_user_by_email(data)
|
||||
return user.pk
|
||||
except users_models.User.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
class CommentField(serializers.WritableField):
|
||||
read_only = False
|
||||
|
||||
def field_from_native(self, data, files, field_name, into):
|
||||
super().field_from_native(data, files, field_name, into)
|
||||
into["comment_html"] = mdrender(self.context['project'], data.get("comment", ""))
|
||||
|
||||
|
||||
class ProjectRelatedField(serializers.RelatedField):
|
||||
read_only = False
|
||||
null_values = (None, "")
|
||||
|
||||
def __init__(self, slug_field, *args, **kwargs):
|
||||
self.slug_field = slug_field
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def from_native(self, data):
|
||||
try:
|
||||
kwargs = {self.slug_field: data, "project": self.context['project']}
|
||||
return self.queryset.get(**kwargs)
|
||||
except ObjectDoesNotExist:
|
||||
raise ValidationError(_("{}=\"{}\" not found in this project".format(self.slug_field, data)))
|
||||
|
||||
|
||||
class HistoryUserField(JsonField):
|
||||
def from_native(self, data):
|
||||
if data is None:
|
||||
return {}
|
||||
|
||||
if len(data) < 2:
|
||||
return {}
|
||||
|
||||
user = UserRelatedField().from_native(data[0])
|
||||
|
||||
if user:
|
||||
pk = user.pk
|
||||
else:
|
||||
pk = None
|
||||
|
||||
return {"pk": pk, "name": data[1]}
|
||||
|
||||
|
||||
class HistoryValuesField(JsonField):
|
||||
def from_native(self, data):
|
||||
if data is None:
|
||||
return []
|
||||
if "users" in data:
|
||||
data['users'] = list(map(UserPkField().from_native, data['users']))
|
||||
return data
|
||||
|
||||
|
||||
class HistoryDiffField(JsonField):
|
||||
def from_native(self, data):
|
||||
if data is None:
|
||||
return []
|
||||
|
||||
if "assigned_to" in data:
|
||||
data['assigned_to'] = list(map(UserPkField().from_native, data['assigned_to']))
|
||||
return data
|
||||
|
||||
|
||||
class TimelineDataField(serializers.WritableField):
|
||||
read_only = False
|
||||
|
||||
def from_native(self, data):
|
||||
new_data = copy.deepcopy(data)
|
||||
try:
|
||||
user = cached_get_user_by_email(new_data["user"]["email"])
|
||||
new_data["user"]["id"] = user.id
|
||||
del new_data["user"]["email"]
|
||||
except users_models.User.DoesNotExist:
|
||||
pass
|
||||
|
||||
return new_data
|
|
@ -0,0 +1,97 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
from taiga.base.api import serializers
|
||||
from taiga.base.api import validators
|
||||
from taiga.projects.history import models as history_models
|
||||
from taiga.projects.attachments import models as attachments_models
|
||||
from taiga.projects.notifications import services as notifications_services
|
||||
from taiga.projects.history import services as history_service
|
||||
|
||||
from .fields import (UserRelatedField, HistoryUserField, HistoryDiffField,
|
||||
JsonField, HistoryValuesField, CommentField, FileField)
|
||||
|
||||
|
||||
class HistoryExportValidator(validators.ModelValidator):
|
||||
user = HistoryUserField()
|
||||
diff = HistoryDiffField(required=False)
|
||||
snapshot = JsonField(required=False)
|
||||
values = HistoryValuesField(required=False)
|
||||
comment = CommentField(required=False)
|
||||
delete_comment_date = serializers.DateTimeField(required=False)
|
||||
delete_comment_user = HistoryUserField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = history_models.HistoryEntry
|
||||
exclude = ("id", "comment_html", "key", "project")
|
||||
|
||||
|
||||
class AttachmentExportValidator(validators.ModelValidator):
|
||||
owner = UserRelatedField(required=False)
|
||||
attached_file = FileField()
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = attachments_models.Attachment
|
||||
exclude = ('id', 'content_type', 'object_id', 'project')
|
||||
|
||||
|
||||
class WatcheableObjectModelValidatorMixin(validators.ModelValidator):
|
||||
watchers = UserRelatedField(many=True, required=False)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._watchers_field = self.base_fields.pop("watchers", None)
|
||||
super(WatcheableObjectModelValidatorMixin, self).__init__(*args, **kwargs)
|
||||
|
||||
"""
|
||||
watchers is not a field from the model so we need to do some magic to make it work like a normal field
|
||||
It's supposed to be represented as an email list but internally it's treated like notifications.Watched instances
|
||||
"""
|
||||
|
||||
def restore_object(self, attrs, instance=None):
|
||||
self.fields.pop("watchers", None)
|
||||
instance = super(WatcheableObjectModelValidatorMixin, self).restore_object(attrs, instance)
|
||||
self._watchers = self.init_data.get("watchers", [])
|
||||
return instance
|
||||
|
||||
def save_watchers(self):
|
||||
new_watcher_emails = set(self._watchers)
|
||||
old_watcher_emails = set(self.object.get_watchers().values_list("email", flat=True))
|
||||
adding_watcher_emails = list(new_watcher_emails.difference(old_watcher_emails))
|
||||
removing_watcher_emails = list(old_watcher_emails.difference(new_watcher_emails))
|
||||
|
||||
User = get_user_model()
|
||||
adding_users = User.objects.filter(email__in=adding_watcher_emails)
|
||||
removing_users = User.objects.filter(email__in=removing_watcher_emails)
|
||||
|
||||
for user in adding_users:
|
||||
notifications_services.add_watcher(self.object, user)
|
||||
|
||||
for user in removing_users:
|
||||
notifications_services.remove_watcher(self.object, user)
|
||||
|
||||
self.object.watchers = [user.email for user in self.object.get_watchers()]
|
||||
|
||||
def to_native(self, obj):
|
||||
ret = super(WatcheableObjectModelValidatorMixin, self).to_native(obj)
|
||||
ret["watchers"] = [user.email for user in obj.get_watchers()]
|
||||
return ret
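
As the docstring above notes, watchers travel as a plain list of emails; save_watchers then reconciles that list against the current watchers with two set differences. A minimal, runnable sketch of that step (sample addresses are illustrative):

# Emails posted by the client vs. emails currently watching the object.
new_watcher_emails = {"alice@example.com", "bob@example.com"}
old_watcher_emails = {"bob@example.com", "carol@example.com"}

adding = new_watcher_emails - old_watcher_emails    # {'alice@example.com'} -> add_watcher()
removing = old_watcher_emails - new_watcher_emails  # {'carol@example.com'} -> remove_watcher()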
|
|
@ -0,0 +1,402 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.base.api import serializers
|
||||
from taiga.base.api import validators
|
||||
from taiga.base.fields import JsonField, PgArrayField
|
||||
from taiga.base.exceptions import ValidationError
|
||||
|
||||
from taiga.projects import models as projects_models
|
||||
from taiga.projects.custom_attributes import models as custom_attributes_models
|
||||
from taiga.projects.epics import models as epics_models
|
||||
from taiga.projects.userstories import models as userstories_models
|
||||
from taiga.projects.tasks import models as tasks_models
|
||||
from taiga.projects.issues import models as issues_models
|
||||
from taiga.projects.milestones import models as milestones_models
|
||||
from taiga.projects.wiki import models as wiki_models
|
||||
from taiga.timeline import models as timeline_models
|
||||
from taiga.users import models as users_models
|
||||
|
||||
from .fields import (FileField, UserRelatedField,
|
||||
ProjectRelatedField,
|
||||
TimelineDataField, ContentTypeField)
|
||||
from .mixins import WatcheableObjectModelValidatorMixin
|
||||
from .cache import (_custom_tasks_attributes_cache,
|
||||
_custom_epics_attributes_cache,
|
||||
_custom_userstories_attributes_cache,
|
||||
_custom_issues_attributes_cache)
|
||||
|
||||
|
||||
class PointsExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.Points
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class EpicStatusExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.EpicStatus
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class UserStoryStatusExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.UserStoryStatus
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class TaskStatusExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.TaskStatus
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class IssueStatusExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.IssueStatus
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class PriorityExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.Priority
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class SeverityExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.Severity
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class IssueTypeExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = projects_models.IssueType
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class RoleExportValidator(validators.ModelValidator):
|
||||
permissions = PgArrayField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = users_models.Role
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class EpicCustomAttributeExportValidator(validators.ModelValidator):
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = custom_attributes_models.EpicCustomAttribute
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class UserStoryCustomAttributeExportValidator(validators.ModelValidator):
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = custom_attributes_models.UserStoryCustomAttribute
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class TaskCustomAttributeExportValidator(validators.ModelValidator):
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = custom_attributes_models.TaskCustomAttribute
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class IssueCustomAttributeExportValidator(validators.ModelValidator):
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = custom_attributes_models.IssueCustomAttribute
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class BaseCustomAttributesValuesExportValidator(validators.ModelValidator):
|
||||
attributes_values = JsonField(source="attributes_values", required=True)
|
||||
_custom_attribute_model = None
|
||||
_container_field = None
|
||||
|
||||
class Meta:
|
||||
exclude = ("id",)
|
||||
|
||||
def validate_attributes_values(self, attrs, source):
|
||||
# values must be a dict
|
||||
data_values = attrs.get("attributes_values", None)
|
||||
if self.object:
|
||||
data_values = (data_values or self.object.attributes_values)
|
||||
|
||||
if type(data_values) is not dict:
|
||||
raise ValidationError(_("Invalid content. It must be {\"key\": \"value\",...}"))
|
||||
|
||||
# Values keys must be in the container object project
|
||||
data_container = attrs.get(self._container_field, None)
|
||||
if data_container:
|
||||
project_id = data_container.project_id
|
||||
elif self.object:
|
||||
project_id = getattr(self.object, self._container_field).project_id
|
||||
else:
|
||||
project_id = None
|
||||
|
||||
values_ids = list(data_values.keys())
|
||||
qs = self._custom_attribute_model.objects.filter(project=project_id,
|
||||
id__in=values_ids)
|
||||
if qs.count() != len(values_ids):
|
||||
            raise ValidationError(_("It contains invalid custom fields."))
|
||||
|
||||
return attrs
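
In other words, attributes_values must be a dict keyed by ids of custom attributes that belong to the container's project. A sketch of an accepted payload, assuming the project owns custom attributes with ids 1 and 2:

attributes_values = {
    "1": "Plasma 5.4",   # value for custom attribute id 1
    "2": "high",         # value for custom attribute id 2
}
# Any key that is not a custom attribute id of that project makes the
# count comparison above fail and raises "It contains invalid custom fields."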
|
||||
|
||||
|
||||
class EpicCustomAttributesValuesExportValidator(BaseCustomAttributesValuesExportValidator):
|
||||
_custom_attribute_model = custom_attributes_models.EpicCustomAttribute
|
||||
_container_model = "epics.Epic"
|
||||
_container_field = "epic"
|
||||
|
||||
class Meta(BaseCustomAttributesValuesExportValidator.Meta):
|
||||
model = custom_attributes_models.EpicCustomAttributesValues
|
||||
|
||||
|
||||
class UserStoryCustomAttributesValuesExportValidator(BaseCustomAttributesValuesExportValidator):
|
||||
_custom_attribute_model = custom_attributes_models.UserStoryCustomAttribute
|
||||
_container_model = "userstories.UserStory"
|
||||
_container_field = "user_story"
|
||||
|
||||
class Meta(BaseCustomAttributesValuesExportValidator.Meta):
|
||||
model = custom_attributes_models.UserStoryCustomAttributesValues
|
||||
|
||||
|
||||
class TaskCustomAttributesValuesExportValidator(BaseCustomAttributesValuesExportValidator):
|
||||
_custom_attribute_model = custom_attributes_models.TaskCustomAttribute
|
||||
_container_field = "task"
|
||||
|
||||
class Meta(BaseCustomAttributesValuesExportValidator.Meta):
|
||||
model = custom_attributes_models.TaskCustomAttributesValues
|
||||
|
||||
|
||||
class IssueCustomAttributesValuesExportValidator(BaseCustomAttributesValuesExportValidator):
|
||||
_custom_attribute_model = custom_attributes_models.IssueCustomAttribute
|
||||
_container_field = "issue"
|
||||
|
||||
class Meta(BaseCustomAttributesValuesExportValidator.Meta):
|
||||
model = custom_attributes_models.IssueCustomAttributesValues
|
||||
|
||||
|
||||
class MembershipExportValidator(validators.ModelValidator):
|
||||
user = UserRelatedField(required=False)
|
||||
role = ProjectRelatedField(slug_field="name")
|
||||
invited_by = UserRelatedField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = projects_models.Membership
|
||||
exclude = ('id', 'project', 'token')
|
||||
|
||||
def full_clean(self, instance):
|
||||
return instance
|
||||
|
||||
|
||||
class RolePointsExportValidator(validators.ModelValidator):
|
||||
role = ProjectRelatedField(slug_field="name")
|
||||
points = ProjectRelatedField(slug_field="name")
|
||||
|
||||
class Meta:
|
||||
model = userstories_models.RolePoints
|
||||
exclude = ('id', 'user_story')
|
||||
|
||||
|
||||
class MilestoneExportValidator(WatcheableObjectModelValidatorMixin):
|
||||
owner = UserRelatedField(required=False)
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
estimated_start = serializers.DateField(required=False)
|
||||
estimated_finish = serializers.DateField(required=False)
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
project = kwargs.pop('project', None)
|
||||
super(MilestoneExportValidator, self).__init__(*args, **kwargs)
|
||||
if project:
|
||||
self.project = project
|
||||
|
||||
def validate_name(self, attrs, source):
|
||||
"""
|
||||
Check the milestone name is not duplicated in the project
|
||||
"""
|
||||
name = attrs[source]
|
||||
qs = self.project.milestones.filter(name=name)
|
||||
if qs.exists():
|
||||
raise ValidationError(_("Name duplicated for the project"))
|
||||
|
||||
return attrs
|
||||
|
||||
class Meta:
|
||||
model = milestones_models.Milestone
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class TaskExportValidator(WatcheableObjectModelValidatorMixin):
|
||||
owner = UserRelatedField(required=False)
|
||||
status = ProjectRelatedField(slug_field="name")
|
||||
user_story = ProjectRelatedField(slug_field="ref", required=False)
|
||||
milestone = ProjectRelatedField(slug_field="name", required=False)
|
||||
assigned_to = UserRelatedField(required=False)
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = tasks_models.Task
|
||||
exclude = ('id', 'project')
|
||||
|
||||
def custom_attributes_queryset(self, project):
|
||||
if project.id not in _custom_tasks_attributes_cache:
|
||||
_custom_tasks_attributes_cache[project.id] = list(project.taskcustomattributes.all().values('id', 'name'))
|
||||
return _custom_tasks_attributes_cache[project.id]
|
||||
|
||||
|
||||
class EpicRelatedUserStoryExportValidator(validators.ModelValidator):
|
||||
user_story = ProjectRelatedField(slug_field="ref")
|
||||
order = serializers.IntegerField()
|
||||
|
||||
class Meta:
|
||||
model = epics_models.RelatedUserStory
|
||||
exclude = ('id', 'epic')
|
||||
|
||||
|
||||
class EpicExportValidator(WatcheableObjectModelValidatorMixin):
|
||||
owner = UserRelatedField(required=False)
|
||||
assigned_to = UserRelatedField(required=False)
|
||||
status = ProjectRelatedField(slug_field="name")
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
user_stories = EpicRelatedUserStoryExportValidator(many=True, required=False)
|
||||
|
||||
class Meta:
|
||||
model = epics_models.Epic
|
||||
exclude = ('id', 'project')
|
||||
|
||||
def custom_attributes_queryset(self, project):
|
||||
if project.id not in _custom_epics_attributes_cache:
|
||||
_custom_epics_attributes_cache[project.id] = list(
|
||||
project.epiccustomattributes.all().values('id', 'name')
|
||||
)
|
||||
return _custom_epics_attributes_cache[project.id]
|
||||
|
||||
|
||||
class UserStoryExportValidator(WatcheableObjectModelValidatorMixin):
|
||||
role_points = RolePointsExportValidator(many=True, required=False)
|
||||
owner = UserRelatedField(required=False)
|
||||
assigned_to = UserRelatedField(required=False)
|
||||
status = ProjectRelatedField(slug_field="name")
|
||||
milestone = ProjectRelatedField(slug_field="name", required=False)
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
generated_from_issue = ProjectRelatedField(slug_field="ref", required=False)
|
||||
|
||||
class Meta:
|
||||
model = userstories_models.UserStory
|
||||
exclude = ('id', 'project', 'points', 'tasks')
|
||||
|
||||
def custom_attributes_queryset(self, project):
|
||||
if project.id not in _custom_userstories_attributes_cache:
|
||||
_custom_userstories_attributes_cache[project.id] = list(
|
||||
project.userstorycustomattributes.all().values('id', 'name')
|
||||
)
|
||||
return _custom_userstories_attributes_cache[project.id]
|
||||
|
||||
|
||||
class IssueExportValidator(WatcheableObjectModelValidatorMixin):
|
||||
owner = UserRelatedField(required=False)
|
||||
status = ProjectRelatedField(slug_field="name")
|
||||
assigned_to = UserRelatedField(required=False)
|
||||
priority = ProjectRelatedField(slug_field="name")
|
||||
severity = ProjectRelatedField(slug_field="name")
|
||||
type = ProjectRelatedField(slug_field="name")
|
||||
milestone = ProjectRelatedField(slug_field="name", required=False)
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = issues_models.Issue
|
||||
exclude = ('id', 'project')
|
||||
|
||||
def custom_attributes_queryset(self, project):
|
||||
if project.id not in _custom_issues_attributes_cache:
|
||||
_custom_issues_attributes_cache[project.id] = list(project.issuecustomattributes.all().values('id', 'name'))
|
||||
return _custom_issues_attributes_cache[project.id]
|
||||
|
||||
|
||||
class WikiPageExportValidator(WatcheableObjectModelValidatorMixin):
|
||||
owner = UserRelatedField(required=False)
|
||||
last_modifier = UserRelatedField(required=False)
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
|
||||
class Meta:
|
||||
model = wiki_models.WikiPage
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class WikiLinkExportValidator(validators.ModelValidator):
|
||||
class Meta:
|
||||
model = wiki_models.WikiLink
|
||||
exclude = ('id', 'project')
|
||||
|
||||
|
||||
class TimelineExportValidator(validators.ModelValidator):
|
||||
data = TimelineDataField()
|
||||
data_content_type = ContentTypeField()
|
||||
|
||||
class Meta:
|
||||
model = timeline_models.Timeline
|
||||
exclude = ('id', 'project', 'namespace', 'object_id', 'content_type')
|
||||
|
||||
|
||||
class ProjectExportValidator(WatcheableObjectModelValidatorMixin):
|
||||
logo = FileField(required=False)
|
||||
anon_permissions = PgArrayField(required=False)
|
||||
public_permissions = PgArrayField(required=False)
|
||||
modified_date = serializers.DateTimeField(required=False)
|
||||
roles = RoleExportValidator(many=True, required=False)
|
||||
owner = UserRelatedField(required=False)
|
||||
memberships = MembershipExportValidator(many=True, required=False)
|
||||
points = PointsExportValidator(many=True, required=False)
|
||||
us_statuses = UserStoryStatusExportValidator(many=True, required=False)
|
||||
task_statuses = TaskStatusExportValidator(many=True, required=False)
|
||||
issue_types = IssueTypeExportValidator(many=True, required=False)
|
||||
issue_statuses = IssueStatusExportValidator(many=True, required=False)
|
||||
priorities = PriorityExportValidator(many=True, required=False)
|
||||
severities = SeverityExportValidator(many=True, required=False)
|
||||
tags_colors = JsonField(required=False)
|
||||
creation_template = serializers.SlugRelatedField(slug_field="slug", required=False)
|
||||
default_points = serializers.SlugRelatedField(slug_field="name", required=False)
|
||||
default_us_status = serializers.SlugRelatedField(slug_field="name", required=False)
|
||||
default_task_status = serializers.SlugRelatedField(slug_field="name", required=False)
|
||||
default_priority = serializers.SlugRelatedField(slug_field="name", required=False)
|
||||
default_severity = serializers.SlugRelatedField(slug_field="name", required=False)
|
||||
default_issue_status = serializers.SlugRelatedField(slug_field="name", required=False)
|
||||
default_issue_type = serializers.SlugRelatedField(slug_field="name", required=False)
|
||||
userstorycustomattributes = UserStoryCustomAttributeExportValidator(many=True, required=False)
|
||||
taskcustomattributes = TaskCustomAttributeExportValidator(many=True, required=False)
|
||||
issuecustomattributes = IssueCustomAttributeExportValidator(many=True, required=False)
|
||||
user_stories = UserStoryExportValidator(many=True, required=False)
|
||||
tasks = TaskExportValidator(many=True, required=False)
|
||||
milestones = MilestoneExportValidator(many=True, required=False)
|
||||
issues = IssueExportValidator(many=True, required=False)
|
||||
wiki_links = WikiLinkExportValidator(many=True, required=False)
|
||||
wiki_pages = WikiPageExportValidator(many=True, required=False)
|
||||
|
||||
class Meta:
|
||||
model = projects_models.Project
|
||||
exclude = ('id', 'members')
|
|
@ -17,6 +17,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from . import serializers
from . import validators
from . import models
from . import permissions
from . import services

@ -27,12 +28,12 @@ from taiga.base.api import ModelCrudViewSet, ModelRetrieveViewSet
from taiga.base.api.utils import get_object_or_404
from taiga.base.decorators import list_route, detail_route

from django.db import transaction
from django.utils.translation import ugettext_lazy as _


class Application(ModelRetrieveViewSet):
    serializer_class = serializers.ApplicationSerializer
    validator_class = validators.ApplicationValidator
    permission_classes = (permissions.ApplicationPermission,)
    model = models.Application

@ -61,6 +62,7 @@ class Application(ModelRetrieveViewSet):

class ApplicationToken(ModelCrudViewSet):
    serializer_class = serializers.ApplicationTokenSerializer
    validator_class = validators.ApplicationTokenValidator
    permission_classes = (permissions.ApplicationTokenPermission,)

    def get_queryset(self):

@ -87,9 +89,9 @@ class ApplicationToken(ModelCrudViewSet):
        auth_code = request.DATA.get("auth_code", None)
        state = request.DATA.get("state", None)
        application_token = get_object_or_404(models.ApplicationToken,
                                              application__id=application_id,
                                              auth_code=auth_code,
                                              state=state)
                                              application__id=application_id,
                                              auth_code=auth_code,
                                              state=state)

        application_token.generate_token()
        application_token.save()

@ -16,9 +16,8 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import json

from taiga.base.api import serializers
from taiga.base.fields import Field

from . import models
from . import services

@ -26,33 +25,27 @@ from . import services
from django.utils.translation import ugettext as _


class ApplicationSerializer(serializers.ModelSerializer):
    class Meta:
        model = models.Application
        fields = ("id", "name", "web", "description", "icon_url")
class ApplicationSerializer(serializers.LightSerializer):
    id = Field()
    name = Field()
    web = Field()
    description = Field()
    icon_url = Field()


class ApplicationTokenSerializer(serializers.ModelSerializer):
    cyphered_token = serializers.CharField(source="cyphered_token", read_only=True)
    next_url = serializers.CharField(source="next_url", read_only=True)
    application = ApplicationSerializer(read_only=True)

    class Meta:
        model = models.ApplicationToken
        fields = ("user", "id", "application", "auth_code", "next_url")
class ApplicationTokenSerializer(serializers.LightSerializer):
    id = Field()
    user = Field(attr="user_id")
    application = ApplicationSerializer()
    auth_code = Field()
    next_url = Field()


class AuthorizationCodeSerializer(serializers.ModelSerializer):
    next_url = serializers.CharField(source="next_url", read_only=True)
    class Meta:
        model = models.ApplicationToken
        fields = ("auth_code", "state", "next_url")
class AuthorizationCodeSerializer(serializers.LightSerializer):
    state = Field()
    auth_code = Field()
    next_url = Field()


class AccessTokenSerializer(serializers.ModelSerializer):
    cyphered_token = serializers.CharField(source="cyphered_token", read_only=True)
    next_url = serializers.CharField(source="next_url", read_only=True)

    class Meta:
        model = models.ApplicationToken
        fields = ("cyphered_token", )
class AccessTokenSerializer(serializers.LightSerializer):
    cyphered_token = Field()

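The shape of this change: each Meta-driven ModelSerializer above is replaced by a LightSerializer that declares every output field explicitly with Field(). A minimal sketch of the new style, assuming LightSerializer exposes the declared fields through .data the way the ModelSerializer versions did (ApplicationExample and the shown output are illustrative, not part of the diff):

from taiga.base.api import serializers
from taiga.base.fields import Field

class ApplicationExample(serializers.LightSerializer):
    id = Field()
    name = Field()
    web = Field()

# data = ApplicationExample(application_instance).data
# -> {"id": ..., "name": ..., "web": ...}   (only the declared fields)
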
@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from taiga.base.api import serializers

from . import models
from taiga.base.api import validators


class ApplicationValidator(validators.ModelValidator):
    class Meta:
        model = models.Application
        fields = ("id", "name", "web", "description", "icon_url")


class ApplicationTokenValidator(validators.ModelValidator):
    cyphered_token = serializers.CharField(source="cyphered_token", read_only=True)
    next_url = serializers.CharField(source="next_url", read_only=True)
    application = ApplicationValidator(read_only=True)

    class Meta:
        model = models.ApplicationToken
        fields = ("user", "id", "application", "auth_code", "next_url")


class AuthorizationCodeValidator(validators.ModelValidator):
    next_url = serializers.CharField(source="next_url", read_only=True)
    class Meta:
        model = models.ApplicationToken
        fields = ("auth_code", "state", "next_url")


class AccessTokenValidator(validators.ModelValidator):
    cyphered_token = serializers.CharField(source="cyphered_token", read_only=True)
    next_url = serializers.CharField(source="next_url", read_only=True)

    class Meta:
        model = models.ApplicationToken
        fields = ("cyphered_token", )

@ -20,7 +20,7 @@ from taiga.base import response
from taiga.base.api import viewsets

from . import permissions
from . import serializers
from . import validators
from . import services

import copy

@ -28,7 +28,7 @@ import copy

class FeedbackViewSet(viewsets.ViewSet):
    permission_classes = (permissions.FeedbackPermission,)
    serializer_class = serializers.FeedbackEntrySerializer
    validator_class = validators.FeedbackEntryValidator

    def create(self, request, **kwargs):
        self.check_permissions(request, "create", None)

@ -37,11 +37,11 @@ class FeedbackViewSet(viewsets.ViewSet):
        data.update({"full_name": request.user.get_full_name(),
                     "email": request.user.email})

        serializer = self.serializer_class(data=data)
        if not serializer.is_valid():
            return response.BadRequest(serializer.errors)
        validator = self.validator_class(data=data)
        if not validator.is_valid():
            return response.BadRequest(validator.errors)

        self.object = serializer.save(force_insert=True)
        self.object = validator.save(force_insert=True)

        extra = {
            "HTTP_HOST": request.META.get("HTTP_HOST", None),

@ -50,4 +50,4 @@ class FeedbackViewSet(viewsets.ViewSet):
        }
        services.send_feedback(self.object, extra, reply_to=[request.user.email])

        return response.Ok(serializer.data)
        return response.Ok(validator.data)

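In outline, the create() flow after this change (a sketch that restates the hunks above; payload details come from the FeedbackEntry model and the authenticated user):

# data = copy of request.DATA plus the requester's full name and email
# validator = validators.FeedbackEntryValidator(data=data)
# if not validator.is_valid(): return response.BadRequest(validator.errors)
# entry = validator.save(force_insert=True)        # self.object in the hunk
# services.send_feedback(entry, extra, reply_to=[request.user.email])
# return response.Ok(validator.data)
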
@ -16,11 +16,11 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from taiga.base.api import serializers
from taiga.base.api import validators

from . import models


class FeedbackEntrySerializer(serializers.ModelSerializer):
class FeedbackEntryValidator(validators.ModelValidator):
    class Meta:
        model = models.FeedbackEntry

@ -21,11 +21,14 @@ from collections import OrderedDict
from .generics import GenericSitemap

from .projects import ProjectsSitemap
from .projects import ProjectEpicsSitemap
from .projects import ProjectBacklogsSitemap
from .projects import ProjectKanbansSitemap
from .projects import ProjectIssuesSitemap
from .projects import ProjectTeamsSitemap

from .epics import EpicsSitemap

from .milestones import MilestonesSitemap

from .userstories import UserStoriesSitemap

@ -43,11 +46,14 @@ sitemaps = OrderedDict([
    ("generics", GenericSitemap),

    ("projects", ProjectsSitemap),
    ("project-epics-list", ProjectEpicsSitemap),
    ("project-backlogs", ProjectBacklogsSitemap),
    ("project-kanbans", ProjectKanbansSitemap),
    ("project-issues-list", ProjectIssuesSitemap),
    ("project-teams", ProjectTeamsSitemap),

    ("epics", EpicsSitemap),

    ("milestones", MilestonesSitemap),

    ("userstories", UserStoriesSitemap),

@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from django.db.models import Q
from django.apps import apps

from taiga.front.templatetags.functions import resolve

from .base import Sitemap


class EpicsSitemap(Sitemap):
    def items(self):
        epic_model = apps.get_model("epics", "Epic")

        # Get epics of public projects OR private projects if anon user can view them
        queryset = epic_model.objects.filter(Q(project__is_private=False) |
                                             Q(project__is_private=True,
                                               project__anon_permissions__contains=["view_epics"]))

        # Exclude blocked projects
        queryset = queryset.filter(project__blocked_code__isnull=True)

        # Project data is needed
        queryset = queryset.select_related("project")

        return queryset

    def location(self, obj):
        return resolve("epic", obj.project.slug, obj.ref)

    def lastmod(self, obj):
        return obj.modified_date

    def changefreq(self, obj):
        return "daily"

    def priority(self, obj):
        return 0.4

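The visibility rule used in items() above, in isolation (Q composition only; building the expression needs Django installed but no configured settings):

from django.db.models import Q

public_or_anon_visible = (
    Q(project__is_private=False) |
    Q(project__is_private=True,
      project__anon_permissions__contains=["view_epics"])
)
# EpicsSitemap.items() applies this filter, then drops blocked projects with
# project__blocked_code__isnull=True and uses select_related("project") because
# location() needs the project slug.
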
@ -51,6 +51,34 @@ class ProjectsSitemap(Sitemap):
        return 0.9


class ProjectEpicsSitemap(Sitemap):
    def items(self):
        project_model = apps.get_model("projects", "Project")

        # Get public projects OR private projects if anon user can view them and epics
        queryset = project_model.objects.filter(Q(is_private=False) |
                                                Q(is_private=True,
                                                  anon_permissions__contains=["view_project",
                                                                              "view_epics"]))

        # Exclude projects without epics enabled
        queryset = queryset.exclude(is_epics_activated=False)

        return queryset

    def location(self, obj):
        return resolve("epics", obj.slug)

    def lastmod(self, obj):
        return obj.modified_date

    def changefreq(self, obj):
        return "daily"

    def priority(self, obj):
        return 0.6


class ProjectBacklogsSitemap(Sitemap):
    def items(self):
        project_model = apps.get_model("projects", "Project")

@ -33,6 +33,9 @@ urls = {

    "project": "/project/{0}",  # project.slug

    "epics": "/project/{0}/epics/",  # project.slug
    "epic": "/project/{0}/epic/{1}",  # project.slug, epic.ref

    "backlog": "/project/{0}/backlog/",  # project.slug
    "taskboard": "/project/{0}/taskboard/{1}",  # project.slug, milestone.slug
    "kanban": "/project/{0}/kanban/",  # project.slug

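These entries are plain str.format templates; judging by the per-entry comments, resolve() (imported in the sitemap modules above) presumably looks an entry up by name and fills in the positional arguments, so the behaviour can be previewed on its own:

urls = {
    "epics": "/project/{0}/epics/",   # project.slug
    "epic": "/project/{0}/epic/{1}",  # project.slug, epic.ref
}
print(urls["epic"].format("my-project", 42))  # -> /project/my-project/epic/42
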
@ -72,13 +72,5 @@ class BitBucketViewSet(BaseWebhookApiViewSet):

        return project_secret == secret_key

    def _get_project(self, request):
        project_id = request.GET.get("project", None)
        try:
            project = Project.objects.get(id=project_id)
            return project
        except Project.DoesNotExist:
            return None

    def _get_event_name(self, request):
        return request.META.get('HTTP_X_EVENT_KEY', None)

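A note on the _get_event_name helper kept above: Django exposes HTTP headers in request.META with an HTTP_ prefix and underscores, so BitBucket's X-Event-Key header arrives as shown below (a sketch for a configured Django project; "issue:created" is one of BitBucket's webhook event keys):

from django.test import RequestFactory

request = RequestFactory().post("/hooks/bitbucket?project=1",
                                HTTP_X_EVENT_KEY="issue:created")
print(request.META.get("HTTP_X_EVENT_KEY"))  # -> issue:created
print(request.GET.get("project"))            # -> 1
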
|
@ -18,181 +18,67 @@
|
|||
|
||||
import re
|
||||
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.base import exceptions as exc
|
||||
from taiga.projects.models import IssueStatus, TaskStatus, UserStoryStatus
|
||||
from taiga.projects.issues.models import Issue
|
||||
from taiga.projects.tasks.models import Task
|
||||
from taiga.projects.userstories.models import UserStory
|
||||
from taiga.projects.history.services import take_snapshot
|
||||
from taiga.projects.notifications.services import send_notifications
|
||||
from taiga.hooks.event_hooks import BaseEventHook
|
||||
from taiga.hooks.exceptions import ActionSyntaxException
|
||||
from taiga.base.utils import json
|
||||
|
||||
from .services import get_bitbucket_user
|
||||
from taiga.hooks.event_hooks import BaseNewIssueEventHook, BaseIssueCommentEventHook, BasePushEventHook
|
||||
|
||||
|
||||
class PushEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
if self.payload is None:
|
||||
return
|
||||
class BaseBitBucketEventHook():
|
||||
platform = "BitBucket"
|
||||
platform_slug = "bitbucket"
|
||||
|
||||
def replace_bitbucket_references(self, project_url, wiki_text):
|
||||
if wiki_text is None:
|
||||
wiki_text = ""
|
||||
|
||||
template = "\g<1>[BitBucket#\g<2>]({}/issues/\g<2>)\g<3>".format(project_url)
|
||||
return re.sub(r"(\s|^)#(\d+)(\s|$)", template, wiki_text, 0, re.M)
|
||||
|
||||
|
||||
class IssuesEventHook(BaseBitBucketEventHook, BaseNewIssueEventHook):
|
||||
def get_data(self):
|
||||
description = self.payload.get('issue', {}).get('content', {}).get('raw', '')
|
||||
project_url = self.payload.get('repository', {}).get('links', {}).get('html', {}).get('href', None)
|
||||
return {
|
||||
"number": self.payload.get('issue', {}).get('id', None),
|
||||
"subject": self.payload.get('issue', {}).get('title', None),
|
||||
"url": self.payload.get('issue', {}).get('links', {}).get('html', {}).get('href', None),
|
||||
"user_id": self.payload.get('actor', {}).get('uuid', None),
|
||||
"user_name": self.payload.get('actor', {}).get('username', None),
|
||||
"user_url": self.payload.get('actor', {}).get('links', {}).get('html', {}).get('href'),
|
||||
"description": self.replace_bitbucket_references(project_url, description),
|
||||
}
|
||||
|
||||
|
||||
class IssueCommentEventHook(BaseBitBucketEventHook, BaseIssueCommentEventHook):
|
||||
def get_data(self):
|
||||
comment_message = self.payload.get('comment', {}).get('content', {}).get('raw', '')
|
||||
project_url = self.payload.get('repository', {}).get('links', {}).get('html', {}).get('href', None)
|
||||
issue_url = self.payload.get('issue', {}).get('links', {}).get('html', {}).get('href', None)
|
||||
comment_id = self.payload.get('comment', {}).get('id', None)
|
||||
comment_url = "{}#comment-{}".format(issue_url, comment_id)
|
||||
return {
|
||||
"number": self.payload.get('issue', {}).get('id', None),
|
||||
'url': issue_url,
|
||||
'user_id': self.payload.get('actor', {}).get('uuid', None),
|
||||
'user_name': self.payload.get('actor', {}).get('username', None),
|
||||
'user_url': self.payload.get('actor', {}).get('links', {}).get('html', {}).get('href'),
|
||||
'comment_url': comment_url,
|
||||
'comment_message': self.replace_bitbucket_references(project_url, comment_message)
|
||||
}
|
||||
|
||||
|
||||
class PushEventHook(BaseBitBucketEventHook, BasePushEventHook):
|
||||
def get_data(self):
|
||||
result = []
|
||||
changes = self.payload.get("push", {}).get('changes', [])
|
||||
for change in filter(None, changes):
|
||||
commits = change.get("commits", [])
|
||||
if not commits:
|
||||
continue
|
||||
|
||||
for commit in commits:
|
||||
message = commit.get("message", None)
|
||||
if not message:
|
||||
continue
|
||||
|
||||
self._process_message(message, None)
|
||||
|
||||
def _process_message(self, message, bitbucket_user):
|
||||
"""
|
||||
The message we will be looking for seems like
|
||||
TG-XX #yyyyyy
|
||||
Where:
|
||||
XX: is the ref for us, issue or task
|
||||
yyyyyy: is the status slug we are setting
|
||||
"""
|
||||
if message is None:
|
||||
return
|
||||
|
||||
p = re.compile("tg-(\d+) +#([-\w]+)")
|
||||
for m in p.finditer(message.lower()):
|
||||
ref = m.group(1)
|
||||
status_slug = m.group(2)
|
||||
self._change_status(ref, status_slug, bitbucket_user)
|
||||
|
||||
def _change_status(self, ref, status_slug, bitbucket_user):
|
||||
if Issue.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Issue
|
||||
statusClass = IssueStatus
|
||||
elif Task.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Task
|
||||
statusClass = TaskStatus
|
||||
elif UserStory.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = UserStory
|
||||
statusClass = UserStoryStatus
|
||||
else:
|
||||
raise ActionSyntaxException(_("The referenced element doesn't exist"))
|
||||
|
||||
element = modelClass.objects.get(project=self.project, ref=ref)
|
||||
|
||||
try:
|
||||
status = statusClass.objects.get(project=self.project, slug=status_slug)
|
||||
except statusClass.DoesNotExist:
|
||||
raise ActionSyntaxException(_("The status doesn't exist"))
|
||||
|
||||
element.status = status
|
||||
element.save()
|
||||
|
||||
snapshot = take_snapshot(element,
|
||||
comment=_("Status changed from BitBucket commit"),
|
||||
user=get_bitbucket_user(bitbucket_user))
|
||||
send_notifications(element, history=snapshot)
|
||||
|
||||
|
||||
def replace_bitbucket_references(project_url, wiki_text):
|
||||
template = "\g<1>[BitBucket#\g<2>]({}/issues/\g<2>)\g<3>".format(project_url)
|
||||
return re.sub(r"(\s|^)#(\d+)(\s|$)", template, wiki_text, 0, re.M)
|
||||
|
||||
|
||||
class IssuesEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
number = self.payload.get('issue', {}).get('id', None)
|
||||
subject = self.payload.get('issue', {}).get('title', None)
|
||||
|
||||
bitbucket_url = self.payload.get('issue', {}).get('links', {}).get('html', {}).get('href', None)
|
||||
|
||||
bitbucket_user_id = self.payload.get('actor', {}).get('user', {}).get('uuid', None)
|
||||
bitbucket_user_name = self.payload.get('actor', {}).get('user', {}).get('username', None)
|
||||
bitbucket_user_url = self.payload.get('actor', {}).get('user', {}).get('links', {}).get('html', {}).get('href')
|
||||
|
||||
project_url = self.payload.get('repository', {}).get('links', {}).get('html', {}).get('href', None)
|
||||
|
||||
description = self.payload.get('issue', {}).get('content', {}).get('raw', '')
|
||||
description = replace_bitbucket_references(project_url, description)
|
||||
|
||||
user = get_bitbucket_user(bitbucket_user_id)
|
||||
|
||||
if not all([subject, bitbucket_url, project_url]):
|
||||
raise ActionSyntaxException(_("Invalid issue information"))
|
||||
|
||||
issue = Issue.objects.create(
|
||||
project=self.project,
|
||||
subject=subject,
|
||||
description=description,
|
||||
status=self.project.default_issue_status,
|
||||
type=self.project.default_issue_type,
|
||||
severity=self.project.default_severity,
|
||||
priority=self.project.default_priority,
|
||||
external_reference=['bitbucket', bitbucket_url],
|
||||
owner=user
|
||||
)
|
||||
take_snapshot(issue, user=user)
|
||||
|
||||
if number and subject and bitbucket_user_name and bitbucket_user_url:
|
||||
comment = _("Issue created by [@{bitbucket_user_name}]({bitbucket_user_url} "
|
||||
"\"See @{bitbucket_user_name}'s BitBucket profile\") "
|
||||
"from BitBucket.\nOrigin BitBucket issue: [bb#{number} - {subject}]({bitbucket_url} "
|
||||
"\"Go to 'bb#{number} - {subject}'\"):\n\n"
|
||||
"{description}").format(bitbucket_user_name=bitbucket_user_name,
|
||||
bitbucket_user_url=bitbucket_user_url,
|
||||
number=number,
|
||||
subject=subject,
|
||||
bitbucket_url=bitbucket_url,
|
||||
description=description)
|
||||
else:
|
||||
comment = _("Issue created from BitBucket.")
|
||||
|
||||
snapshot = take_snapshot(issue, comment=comment, user=user)
|
||||
send_notifications(issue, history=snapshot)
|
||||
|
||||
|
||||
class IssueCommentEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
number = self.payload.get('issue', {}).get('id', None)
|
||||
subject = self.payload.get('issue', {}).get('title', None)
|
||||
|
||||
bitbucket_url = self.payload.get('issue', {}).get('links', {}).get('html', {}).get('href', None)
|
||||
bitbucket_user_id = self.payload.get('actor', {}).get('user', {}).get('uuid', None)
|
||||
bitbucket_user_name = self.payload.get('actor', {}).get('user', {}).get('username', None)
|
||||
bitbucket_user_url = self.payload.get('actor', {}).get('user', {}).get('links', {}).get('html', {}).get('href')
|
||||
|
||||
project_url = self.payload.get('repository', {}).get('links', {}).get('html', {}).get('href', None)
|
||||
|
||||
comment_message = self.payload.get('comment', {}).get('content', {}).get('raw', '')
|
||||
comment_message = replace_bitbucket_references(project_url, comment_message)
|
||||
|
||||
user = get_bitbucket_user(bitbucket_user_id)
|
||||
|
||||
if not all([comment_message, bitbucket_url, project_url]):
|
||||
raise ActionSyntaxException(_("Invalid issue comment information"))
|
||||
|
||||
issues = Issue.objects.filter(external_reference=["bitbucket", bitbucket_url])
|
||||
tasks = Task.objects.filter(external_reference=["bitbucket", bitbucket_url])
|
||||
uss = UserStory.objects.filter(external_reference=["bitbucket", bitbucket_url])
|
||||
|
||||
for item in list(issues) + list(tasks) + list(uss):
|
||||
if number and subject and bitbucket_user_name and bitbucket_user_url:
|
||||
comment = _("Comment by [@{bitbucket_user_name}]({bitbucket_user_url} "
|
||||
"\"See @{bitbucket_user_name}'s BitBucket profile\") "
|
||||
"from BitBucket.\nOrigin BitBucket issue: [bb#{number} - {subject}]({bitbucket_url} "
|
||||
"\"Go to 'bb#{number} - {subject}'\")\n\n"
|
||||
"{message}").format(bitbucket_user_name=bitbucket_user_name,
|
||||
bitbucket_user_url=bitbucket_user_url,
|
||||
number=number,
|
||||
subject=subject,
|
||||
bitbucket_url=bitbucket_url,
|
||||
message=comment_message)
|
||||
else:
|
||||
comment = _("Comment From BitBucket:\n\n{message}").format(message=comment_message)
|
||||
|
||||
snapshot = take_snapshot(item, comment=comment, user=user)
|
||||
send_notifications(item, history=snapshot)
|
||||
for commit in change.get("commits", []):
|
||||
message = commit.get("message")
|
||||
result.append({
|
||||
'user_id': commit.get('author', {}).get('user', {}).get('uuid', None),
|
||||
"user_name": commit.get('author', {}).get('user', {}).get('username', None),
|
||||
"user_url": commit.get('author', {}).get('user', {}).get('links', {}).get('html', {}).get('href'),
|
||||
"commit_id": commit.get("hash", None),
|
||||
"commit_url": commit.get("links", {}).get('html', {}).get('href'),
|
||||
"commit_message": message.strip(),
|
||||
})
|
||||
return result
|
||||
|
|
|
@ -16,11 +16,251 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import re
|
||||
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.contrib.auth import get_user_model
|
||||
from taiga.projects.models import IssueStatus, TaskStatus, UserStoryStatus, EpicStatus
|
||||
from taiga.projects.epics.models import Epic
|
||||
from taiga.projects.issues.models import Issue
|
||||
from taiga.projects.tasks.models import Task
|
||||
from taiga.projects.userstories.models import UserStory
|
||||
from taiga.projects.history.services import take_snapshot
|
||||
from taiga.projects.notifications.services import send_notifications
|
||||
from taiga.hooks.exceptions import ActionSyntaxException
|
||||
from taiga.users.models import AuthData
|
||||
|
||||
|
||||
class BaseEventHook:
|
||||
platform = "Unknown"
|
||||
platform_slug = "unknown"
|
||||
|
||||
def __init__(self, project, payload):
|
||||
self.project = project
|
||||
self.payload = payload
|
||||
|
||||
def ignore(self):
|
||||
return False
|
||||
|
||||
def get_user(self, user_id, platform):
|
||||
user = None
|
||||
|
||||
if user_id:
|
||||
try:
|
||||
user = AuthData.objects.get(key=platform, value=user_id).user
|
||||
except AuthData.DoesNotExist:
|
||||
pass
|
||||
|
||||
if user is None:
|
||||
user = get_user_model().objects.get(is_system=True, username__startswith=platform)
|
||||
|
||||
return user
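
# Resolution order used by get_user above, e.g. for a commit author coming
# from GitHub (values are illustrative):
#   1. AuthData(key="github", value="1234567").user
#        -> the Taiga account that linked this GitHub identity, if any
#   2. otherwise the per-platform system user:
#        get_user_model().objects.get(is_system=True, username__startswith="github")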
|
||||
|
||||
|
||||
class BaseIssueCommentEventHook(BaseEventHook):
|
||||
def get_data(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def generate_issue_comment_message(self, **kwargs):
|
||||
_issue_comment_message = _(
|
||||
"[@{user_name}]({user_url} "
|
||||
"\"See @{user_name}'s {platform} profile\") "
|
||||
"says in [{platform}#{number}]({comment_url} \"Go to comment\"):\n\n"
|
||||
"\"{comment_message}\""
|
||||
)
|
||||
_simple_issue_comment_message = _("Comment From {platform}:\n\n> {comment_message}")
|
||||
try:
|
||||
return _issue_comment_message.format(platform=self.platform, **kwargs)
|
||||
except Exception:
|
||||
            return _simple_issue_comment_message.format(platform=self.platform, comment_message=kwargs.get('comment_message'))
|
||||
|
||||
def process_event(self):
|
||||
raise NotImplementedError("process_event must be overwritten")
|
||||
if self.ignore():
|
||||
return
|
||||
|
||||
data = self.get_data()
|
||||
|
||||
if not all([data['comment_message'], data['url']]):
|
||||
raise ActionSyntaxException(_("Invalid issue comment information"))
|
||||
|
||||
comment = self.generate_issue_comment_message(**data)
|
||||
|
||||
issues = Issue.objects.filter(external_reference=[self.platform_slug, data['url']])
|
||||
tasks = Task.objects.filter(external_reference=[self.platform_slug, data['url']])
|
||||
uss = UserStory.objects.filter(external_reference=[self.platform_slug, data['url']])
|
||||
|
||||
for item in list(issues) + list(tasks) + list(uss):
|
||||
snapshot = take_snapshot(item, comment=comment, user=self.get_user(data['user_id'], self.platform_slug))
|
||||
send_notifications(item, history=snapshot)
|
||||
|
||||
|
||||
class BaseNewIssueEventHook(BaseEventHook):
|
||||
def get_data(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def generate_new_issue_comment(self, **kwargs):
|
||||
_new_issue_message = _(
|
||||
"Issue created by [@{user_name}]({user_url} "
|
||||
"\"See @{user_name}'s {platform} profile\") "
|
||||
"from [{platform}#{number}]({url} \"Go to issue\")."
|
||||
)
|
||||
_simple_new_issue_message = _("Issue created from {platform}.")
|
||||
try:
|
||||
return _new_issue_message.format(platform=self.platform, **kwargs)
|
||||
except Exception:
|
||||
return _simple_new_issue_message.format(platform=self.platform)
|
||||
|
||||
def process_event(self):
|
||||
if self.ignore():
|
||||
return
|
||||
|
||||
data = self.get_data()
|
||||
|
||||
if not all([data['subject'], data['url']]):
|
||||
raise ActionSyntaxException(_("Invalid issue information"))
|
||||
|
||||
user = self.get_user(data['user_id'], self.platform_slug)
|
||||
|
||||
issue = Issue.objects.create(
|
||||
project=self.project,
|
||||
subject=data['subject'],
|
||||
description=data['description'],
|
||||
status=self.project.default_issue_status,
|
||||
type=self.project.default_issue_type,
|
||||
severity=self.project.default_severity,
|
||||
priority=self.project.default_priority,
|
||||
external_reference=[self.platform_slug, data['url']],
|
||||
owner=user
|
||||
)
|
||||
take_snapshot(issue, user=user)
|
||||
|
||||
comment = self.generate_new_issue_comment(**data)
|
||||
|
||||
snapshot = take_snapshot(issue, comment=comment, user=user)
|
||||
send_notifications(issue, history=snapshot)
|
||||
|
||||
|
||||
class BasePushEventHook(BaseEventHook):
|
||||
def get_data(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def generate_status_change_comment(self, **kwargs):
|
||||
if kwargs.get('user_url', None) is None:
|
||||
user_text = kwargs.get('user_name', _('unknown user'))
|
||||
else:
|
||||
user_text = "[@{user_name}]({user_url} \"See @{user_name}'s {platform} profile\")".format(
|
||||
platform=self.platform,
|
||||
**kwargs
|
||||
)
|
||||
_status_change_message = _(
|
||||
"{user_text} changed the status from "
|
||||
"[{platform} commit]({commit_url} \"See commit '{commit_id} - {commit_message}'\")\n\n"
|
||||
" - Status: **{src_status}** → **{dst_status}**"
|
||||
)
|
||||
_simple_status_change_message = _(
|
||||
"Changed status from {platform} commit.\n\n"
|
||||
" - Status: **{src_status}** → **{dst_status}**"
|
||||
)
|
||||
try:
|
||||
return _status_change_message.format(platform=self.platform, user_text=user_text, **kwargs)
|
||||
except Exception:
|
||||
return _simple_status_change_message.format(platform=self.platform)
|
||||
|
||||
def generate_commit_reference_comment(self, **kwargs):
|
||||
if kwargs.get('user_url', None) is None:
|
||||
user_text = kwargs.get('user_name', _('unknown user'))
|
||||
else:
|
||||
user_text = "[@{user_name}]({user_url} \"See @{user_name}'s {platform} profile\")".format(
|
||||
platform=self.platform,
|
||||
**kwargs
|
||||
)
|
||||
|
||||
_status_change_message = _(
|
||||
"This {type_name} has been mentioned by {user_text} "
|
||||
"in the [{platform} commit]({commit_url} \"See commit '{commit_id} - {commit_message}'\") "
|
||||
"\"{commit_message}\""
|
||||
)
|
||||
_simple_status_change_message = _(
|
||||
"This issue has been mentioned in the {platform} commit "
|
||||
"\"{commit_message}\""
|
||||
)
|
||||
try:
|
||||
return _status_change_message.format(platform=self.platform, user_text=user_text, **kwargs)
|
||||
except Exception:
|
||||
return _simple_status_change_message.format(platform=self.platform)
|
||||
|
||||
def get_item_classes(self, ref):
|
||||
if Epic.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Epic
|
||||
statusClass = EpicStatus
|
||||
elif Issue.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Issue
|
||||
statusClass = IssueStatus
|
||||
elif Task.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Task
|
||||
statusClass = TaskStatus
|
||||
elif UserStory.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = UserStory
|
||||
statusClass = UserStoryStatus
|
||||
else:
|
||||
raise ActionSyntaxException(_("The referenced element doesn't exist"))
|
||||
|
||||
return (modelClass, statusClass)
|
||||
|
||||
def get_item_by_ref(self, ref):
|
||||
(modelClass, statusClass) = self.get_item_classes(ref)
|
||||
|
||||
return modelClass.objects.get(project=self.project, ref=ref)
|
||||
|
||||
def set_item_status(self, ref, status_slug):
|
||||
(modelClass, statusClass) = self.get_item_classes(ref)
|
||||
element = modelClass.objects.get(project=self.project, ref=ref)
|
||||
|
||||
try:
|
||||
status = statusClass.objects.get(project=self.project, slug=status_slug)
|
||||
except statusClass.DoesNotExist:
|
||||
raise ActionSyntaxException(_("The status doesn't exist"))
|
||||
|
||||
src_status = element.status.name
|
||||
dst_status = status.name
|
||||
|
||||
element.status = status
|
||||
element.save()
|
||||
return (element, src_status, dst_status)
|
||||
|
||||
def process_event(self):
|
||||
if self.ignore():
|
||||
return
|
||||
data = self.get_data()
|
||||
|
||||
for commit in data:
|
||||
consumed_refs = []
|
||||
|
||||
# Status changes
|
||||
p = re.compile("tg-(\d+) +#([-\w]+)")
|
||||
for m in p.finditer(commit['commit_message'].lower()):
|
||||
ref = m.group(1)
|
||||
status_slug = m.group(2)
|
||||
(element, src_status, dst_status) = self.set_item_status(ref, status_slug)
|
||||
|
||||
comment = self.generate_status_change_comment(src_status=src_status, dst_status=dst_status, **commit)
|
||||
snapshot = take_snapshot(element,
|
||||
comment=comment,
|
||||
user=self.get_user(commit['user_id'], self.platform_slug))
|
||||
send_notifications(element, history=snapshot)
|
||||
consumed_refs.append(ref)
|
||||
|
||||
# Reference on commit
|
||||
p = re.compile("tg-(\d+)")
|
||||
for m in p.finditer(commit['commit_message'].lower()):
|
||||
ref = m.group(1)
|
||||
if ref in consumed_refs:
|
||||
continue
|
||||
element = self.get_item_by_ref(ref)
|
||||
type_name = element.__class__._meta.verbose_name
|
||||
comment = self.generate_commit_reference_comment(type_name=type_name, **commit)
|
||||
snapshot = take_snapshot(element,
|
||||
comment=comment,
|
||||
user=self.get_user(commit['user_id'], self.platform_slug))
|
||||
send_notifications(element, history=snapshot)
|
||||
consumed_refs.append(ref)
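
The two scans above in isolation, runnable as-is ("TG-123" is an item ref and "#in-progress" a status slug, following the commit-message convention described in this changeset):

import re

message = "Fix login timeout TG-123 #in-progress, also relates to TG-45"

status_changes = re.findall(r"tg-(\d+) +#([-\w]+)", message.lower())
references = re.findall(r"tg-(\d+)", message.lower())

print(status_changes)  # [('123', 'in-progress')]
print(references)      # ['123', '45']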
|
||||
|
|
|
@ -16,201 +16,72 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.projects.models import IssueStatus, TaskStatus, UserStoryStatus
|
||||
|
||||
from taiga.projects.issues.models import Issue
|
||||
from taiga.projects.tasks.models import Task
|
||||
from taiga.projects.userstories.models import UserStory
|
||||
from taiga.projects.history.services import take_snapshot
|
||||
from taiga.projects.notifications.services import send_notifications
|
||||
from taiga.hooks.event_hooks import BaseEventHook
|
||||
from taiga.hooks.exceptions import ActionSyntaxException
|
||||
|
||||
from .services import get_github_user
|
||||
|
||||
import re
|
||||
|
||||
|
||||
class PushEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
if self.payload is None:
|
||||
return
|
||||
|
||||
github_user = self.payload.get('sender', {})
|
||||
|
||||
commits = self.payload.get("commits", [])
|
||||
for commit in commits:
|
||||
self._process_commit(commit, github_user)
|
||||
|
||||
def _process_commit(self, commit, github_user):
|
||||
"""
|
||||
The message we will be looking for seems like
|
||||
TG-XX #yyyyyy
|
||||
Where:
|
||||
XX: is the ref for us, issue or task
|
||||
yyyyyy: is the status slug we are setting
|
||||
"""
|
||||
message = commit.get("message", None)
|
||||
|
||||
if message is None:
|
||||
return
|
||||
|
||||
p = re.compile("tg-(\d+) +#([-\w]+)")
|
||||
for m in p.finditer(message.lower()):
|
||||
ref = m.group(1)
|
||||
status_slug = m.group(2)
|
||||
self._change_status(ref, status_slug, github_user, commit)
|
||||
|
||||
def _change_status(self, ref, status_slug, github_user, commit):
|
||||
if Issue.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Issue
|
||||
statusClass = IssueStatus
|
||||
elif Task.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Task
|
||||
statusClass = TaskStatus
|
||||
elif UserStory.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = UserStory
|
||||
statusClass = UserStoryStatus
|
||||
else:
|
||||
raise ActionSyntaxException(_("The referenced element doesn't exist"))
|
||||
|
||||
element = modelClass.objects.get(project=self.project, ref=ref)
|
||||
|
||||
try:
|
||||
status = statusClass.objects.get(project=self.project, slug=status_slug)
|
||||
except statusClass.DoesNotExist:
|
||||
raise ActionSyntaxException(_("The status doesn't exist"))
|
||||
|
||||
element.status = status
|
||||
element.save()
|
||||
|
||||
github_user_id = github_user.get('id', None)
|
||||
github_user_name = github_user.get('login', None)
|
||||
github_user_url = github_user.get('html_url', None)
|
||||
commit_id = commit.get("id", None)
|
||||
commit_url = commit.get("url", None)
|
||||
commit_message = commit.get("message", None)
|
||||
|
||||
if (github_user_id and github_user_name and github_user_url and
|
||||
commit_id and commit_url and commit_message):
|
||||
comment = _("Status changed by [@{github_user_name}]({github_user_url} "
|
||||
"\"See @{github_user_name}'s GitHub profile\") "
|
||||
"from GitHub commit [{commit_id}]({commit_url} "
|
||||
"\"See commit '{commit_id} - {commit_message}'\").").format(
|
||||
github_user_name=github_user_name,
|
||||
github_user_url=github_user_url,
|
||||
commit_id=commit_id[:7],
|
||||
commit_url=commit_url,
|
||||
commit_message=commit_message)
|
||||
|
||||
else:
|
||||
comment = _("Status changed from GitHub commit.")
|
||||
|
||||
snapshot = take_snapshot(element,
|
||||
comment=comment,
|
||||
user=get_github_user(github_user_id))
|
||||
send_notifications(element, history=snapshot)
|
||||
from taiga.hooks.event_hooks import BaseNewIssueEventHook, BaseIssueCommentEventHook, BasePushEventHook
|
||||
|
||||
|
||||
def replace_github_references(project_url, wiki_text):
|
||||
if wiki_text is None:
|
||||
wiki_text = ""
|
||||
class BaseGitHubEventHook():
|
||||
platform = "GitHub"
|
||||
platform_slug = "github"
|
||||
|
||||
template = "\g<1>[GitHub#\g<2>]({}/issues/\g<2>)\g<3>".format(project_url)
|
||||
return re.sub(r"(\s|^)#(\d+)(\s|$)", template, wiki_text, 0, re.M)
|
||||
def replace_github_references(self, project_url, wiki_text):
|
||||
if wiki_text is None:
|
||||
wiki_text = ""
|
||||
|
||||
template = "\g<1>[GitHub#\g<2>]({}/issues/\g<2>)\g<3>".format(project_url)
|
||||
return re.sub(r"(\s|^)#(\d+)(\s|$)", template, wiki_text, 0, re.M)
|
||||
|
||||
|
||||
class IssuesEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
if self.payload.get('action', None) != "opened":
|
||||
return
|
||||
class IssuesEventHook(BaseGitHubEventHook, BaseNewIssueEventHook):
|
||||
def ignore(self):
|
||||
return self.payload.get('action', None) != "opened"
|
||||
|
||||
number = self.payload.get('issue', {}).get('number', None)
|
||||
subject = self.payload.get('issue', {}).get('title', None)
|
||||
github_url = self.payload.get('issue', {}).get('html_url', None)
|
||||
github_user_id = self.payload.get('issue', {}).get('user', {}).get('id', None)
|
||||
github_user_name = self.payload.get('issue', {}).get('user', {}).get('login', None)
|
||||
github_user_url = self.payload.get('issue', {}).get('user', {}).get('html_url', None)
|
||||
project_url = self.payload.get('repository', {}).get('html_url', None)
|
||||
def get_data(self):
|
||||
description = self.payload.get('issue', {}).get('body', None)
|
||||
description = replace_github_references(project_url, description)
|
||||
|
||||
user = get_github_user(github_user_id)
|
||||
|
||||
if not all([subject, github_url, project_url]):
|
||||
raise ActionSyntaxException(_("Invalid issue information"))
|
||||
|
||||
issue = Issue.objects.create(
|
||||
project=self.project,
|
||||
subject=subject,
|
||||
description=description,
|
||||
status=self.project.default_issue_status,
|
||||
type=self.project.default_issue_type,
|
||||
severity=self.project.default_severity,
|
||||
priority=self.project.default_priority,
|
||||
external_reference=['github', github_url],
|
||||
owner=user
|
||||
)
|
||||
take_snapshot(issue, user=user)
|
||||
|
||||
if number and subject and github_user_name and github_user_url:
|
||||
comment = _("Issue created by [@{github_user_name}]({github_user_url} "
|
||||
"\"See @{github_user_name}'s GitHub profile\") "
|
||||
"from GitHub.\nOrigin GitHub issue: [gh#{number} - {subject}]({github_url} "
|
||||
"\"Go to 'gh#{number} - {subject}'\"):\n\n"
|
||||
"{description}").format(github_user_name=github_user_name,
|
||||
github_user_url=github_user_url,
|
||||
number=number,
|
||||
subject=subject,
|
||||
github_url=github_url,
|
||||
description=description)
|
||||
else:
|
||||
comment = _("Issue created from GitHub.")
|
||||
|
||||
snapshot = take_snapshot(issue, comment=comment, user=user)
|
||||
send_notifications(issue, history=snapshot)
|
||||
|
||||
|
||||
class IssueCommentEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
if self.payload.get('action', None) != "created":
|
||||
raise ActionSyntaxException(_("Invalid issue comment information"))
|
||||
|
||||
number = self.payload.get('issue', {}).get('number', None)
|
||||
subject = self.payload.get('issue', {}).get('title', None)
|
||||
github_url = self.payload.get('issue', {}).get('html_url', None)
|
||||
github_user_id = self.payload.get('sender', {}).get('id', None)
|
||||
github_user_name = self.payload.get('sender', {}).get('login', None)
|
||||
github_user_url = self.payload.get('sender', {}).get('html_url', None)
|
||||
project_url = self.payload.get('repository', {}).get('html_url', None)
|
||||
return {
|
||||
"number": self.payload.get('issue', {}).get('number', None),
|
||||
"subject": self.payload.get('issue', {}).get('title', None),
|
||||
"url": self.payload.get('issue', {}).get('html_url', None),
|
||||
"user_id": self.payload.get('issue', {}).get('user', {}).get('id', None),
|
||||
"user_name": self.payload.get('issue', {}).get('user', {}).get('login', None),
|
||||
"user_url": self.payload.get('issue', {}).get('user', {}).get('html_url', None),
|
||||
"description": self.replace_github_references(project_url, description),
|
||||
}
|
||||
|
||||
|
||||
class IssueCommentEventHook(BaseGitHubEventHook, BaseIssueCommentEventHook):
|
||||
def ignore(self):
|
||||
return self.payload.get('action', None) != "created"
|
||||
|
||||
def get_data(self):
|
||||
comment_message = self.payload.get('comment', {}).get('body', None)
|
||||
comment_message = replace_github_references(project_url, comment_message)
|
||||
project_url = self.payload.get('repository', {}).get('html_url', None)
|
||||
return {
|
||||
"number": self.payload.get('issue', {}).get('number', None),
|
||||
"url": self.payload.get('issue', {}).get('html_url', None),
|
||||
"user_id": self.payload.get('sender', {}).get('id', None),
|
||||
"user_name": self.payload.get('sender', {}).get('login', None),
|
||||
"user_url": self.payload.get('sender', {}).get('html_url', None),
|
||||
"comment_url": self.payload.get('comment', {}).get('html_url', None),
|
||||
"comment_message": self.replace_github_references(project_url, comment_message),
|
||||
}
|
||||
|
||||
user = get_github_user(github_user_id)
|
||||
|
||||
if not all([comment_message, github_url, project_url]):
|
||||
raise ActionSyntaxException(_("Invalid issue comment information"))
|
||||
class PushEventHook(BaseGitHubEventHook, BasePushEventHook):
|
||||
def get_data(self):
|
||||
result = []
|
||||
github_user = self.payload.get('sender', {})
|
||||
commits = self.payload.get("commits", [])
|
||||
for commit in filter(None, commits):
|
||||
result.append({
|
||||
"user_id": github_user.get('id', None),
|
||||
"user_name": github_user.get('login', None),
|
||||
"user_url": github_user.get('html_url', None),
|
||||
"commit_id": commit.get("id", None),
|
||||
"commit_url": commit.get("url", None),
|
||||
"commit_message": commit.get("message", None),
|
||||
})
|
||||
|
||||
issues = Issue.objects.filter(external_reference=["github", github_url])
|
||||
tasks = Task.objects.filter(external_reference=["github", github_url])
|
||||
uss = UserStory.objects.filter(external_reference=["github", github_url])
|
||||
|
||||
for item in list(issues) + list(tasks) + list(uss):
|
||||
if number and subject and github_user_name and github_user_url:
|
||||
comment = _("Comment by [@{github_user_name}]({github_user_url} "
|
||||
"\"See @{github_user_name}'s GitHub profile\") "
|
||||
"from GitHub.\nOrigin GitHub issue: [gh#{number} - {subject}]({github_url} "
|
||||
"\"Go to 'gh#{number} - {subject}'\")\n\n"
|
||||
"{message}").format(github_user_name=github_user_name,
|
||||
github_user_url=github_user_url,
|
||||
number=number,
|
||||
subject=subject,
|
||||
github_url=github_url,
|
||||
message=comment_message)
|
||||
else:
|
||||
comment = _("Comment From GitHub:\n\n{message}").format(message=comment_message)
|
||||
|
||||
snapshot = take_snapshot(item, comment=comment, user=user)
|
||||
send_notifications(item, history=snapshot)
|
||||
return result
|
||||
|
|
|
@ -18,10 +18,8 @@
|
|||
|
||||
import uuid
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
from taiga.users.models import AuthData
|
||||
from taiga.base.utils.urls import get_absolute_url
|
||||
|
||||
|
||||
|
@ -38,18 +36,3 @@ def get_or_generate_config(project):
|
|||
url = "%s?project=%s" % (url, project.id)
|
||||
g_config["webhooks_url"] = url
|
||||
return g_config
|
||||
|
||||
|
||||
def get_github_user(github_id):
|
||||
user = None
|
||||
|
||||
if github_id:
|
||||
try:
|
||||
user = AuthData.objects.get(key="github", value=github_id).user
|
||||
except AuthData.DoesNotExist:
|
||||
pass
|
||||
|
||||
if user is None:
|
||||
user = get_user_model().objects.get(is_system=True, username__startswith="github")
|
||||
|
||||
return user
|
||||
|
|
|
@ -70,14 +70,6 @@ class GitLabViewSet(BaseWebhookApiViewSet):
|
|||
|
||||
return project_secret == secret_key
|
||||
|
||||
def _get_project(self, request):
|
||||
project_id = request.GET.get("project", None)
|
||||
try:
|
||||
project = Project.objects.get(id=project_id)
|
||||
return project
|
||||
except Project.DoesNotExist:
|
||||
return None
|
||||
|
||||
def _get_event_name(self, request):
|
||||
payload = json.loads(request.body.decode("utf-8"))
|
||||
return payload.get('object_kind', 'push') if payload is not None else 'empty'
|
||||
|
|
|
@ -19,158 +19,71 @@
|
|||
import re
|
||||
import os
|
||||
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from taiga.projects.models import IssueStatus, TaskStatus, UserStoryStatus
|
||||
|
||||
from taiga.projects.issues.models import Issue
|
||||
from taiga.projects.tasks.models import Task
|
||||
from taiga.projects.userstories.models import UserStory
|
||||
from taiga.projects.history.services import take_snapshot
|
||||
from taiga.projects.notifications.services import send_notifications
|
||||
from taiga.hooks.event_hooks import BaseEventHook
|
||||
from taiga.hooks.exceptions import ActionSyntaxException
|
||||
|
||||
from .services import get_gitlab_user
|
||||
from taiga.hooks.event_hooks import BaseNewIssueEventHook, BaseIssueCommentEventHook, BasePushEventHook
|
||||
|
||||
|
||||
class PushEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
if self.payload is None:
|
||||
return
|
||||
class BaseGitLabEventHook():
|
||||
platform = "GitLab"
|
||||
platform_slug = "gitlab"
|
||||
|
||||
commits = self.payload.get("commits", [])
|
||||
for commit in commits:
|
||||
message = commit.get("message", None)
|
||||
self._process_message(message, None)
|
||||
def replace_gitlab_references(self, project_url, wiki_text):
|
||||
if wiki_text is None:
|
||||
wiki_text = ""
|
||||
|
||||
def _process_message(self, message, gitlab_user):
|
||||
"""
|
||||
The message we will be looking for seems like
|
||||
TG-XX #yyyyyy
|
||||
Where:
|
||||
XX: is the ref of a user story, issue or task
|
||||
yyyyyy: is the status slug we are setting
|
||||
"""
|
||||
if message is None:
|
||||
return
|
||||
|
||||
p = re.compile("tg-(\d+) +#([-\w]+)")
|
||||
for m in p.finditer(message.lower()):
|
||||
ref = m.group(1)
|
||||
status_slug = m.group(2)
|
||||
self._change_status(ref, status_slug, gitlab_user)
|
||||
|
||||
def _change_status(self, ref, status_slug, gitlab_user):
|
||||
if Issue.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Issue
|
||||
statusClass = IssueStatus
|
||||
elif Task.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = Task
|
||||
statusClass = TaskStatus
|
||||
elif UserStory.objects.filter(project=self.project, ref=ref).exists():
|
||||
modelClass = UserStory
|
||||
statusClass = UserStoryStatus
|
||||
else:
|
||||
raise ActionSyntaxException(_("The referenced element doesn't exist"))
|
||||
|
||||
element = modelClass.objects.get(project=self.project, ref=ref)
|
||||
|
||||
try:
|
||||
status = statusClass.objects.get(project=self.project, slug=status_slug)
|
||||
except statusClass.DoesNotExist:
|
||||
raise ActionSyntaxException(_("The status doesn't exist"))
|
||||
|
||||
element.status = status
|
||||
element.save()
|
||||
|
||||
snapshot = take_snapshot(element,
|
||||
comment=_("Status changed from GitLab commit"),
|
||||
user=get_gitlab_user(gitlab_user))
|
||||
send_notifications(element, history=snapshot)
|
||||
template = "\g<1>[GitLab#\g<2>]({}/issues/\g<2>)\g<3>".format(project_url)
|
||||
return re.sub(r"(\s|^)#(\d+)(\s|$)", template, wiki_text, 0, re.M)
|
||||
|
||||
|
||||
def replace_gitlab_references(project_url, wiki_text):
|
||||
if wiki_text is None:
|
||||
wiki_text = ""
|
||||
class IssuesEventHook(BaseGitLabEventHook, BaseNewIssueEventHook):
|
||||
def ignore(self):
|
||||
return self.payload.get('object_attributes', {}).get("action", "") != "open"
|
||||
|
||||
template = "\g<1>[GitLab#\g<2>]({}/issues/\g<2>)\g<3>".format(project_url)
|
||||
return re.sub(r"(\s|^)#(\d+)(\s|$)", template, wiki_text, 0, re.M)
|
||||
|
||||
|
||||
class IssuesEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
if self.payload.get('object_attributes', {}).get("action", "") != "open":
|
||||
return
|
||||
|
||||
subject = self.payload.get('object_attributes', {}).get('title', None)
|
||||
def get_data(self):
|
||||
description = self.payload.get('object_attributes', {}).get('description', None)
|
||||
gitlab_reference = self.payload.get('object_attributes', {}).get('url', None)
|
||||
|
||||
project_url = None
|
||||
if gitlab_reference:
|
||||
project_url = os.path.basename(os.path.basename(gitlab_reference))
|
||||
|
||||
if not all([subject, gitlab_reference, project_url]):
|
||||
raise ActionSyntaxException(_("Invalid issue information"))
|
||||
|
||||
issue = Issue.objects.create(
|
||||
project=self.project,
|
||||
subject=subject,
|
||||
description=replace_gitlab_references(project_url, description),
|
||||
status=self.project.default_issue_status,
|
||||
type=self.project.default_issue_type,
|
||||
severity=self.project.default_severity,
|
||||
priority=self.project.default_priority,
|
||||
external_reference=['gitlab', gitlab_reference],
|
||||
owner=get_gitlab_user(None)
|
||||
)
|
||||
take_snapshot(issue, user=get_gitlab_user(None))
|
||||
|
||||
snapshot = take_snapshot(issue, comment=_("Created from GitLab"), user=get_gitlab_user(None))
|
||||
send_notifications(issue, history=snapshot)
|
||||
|
||||
|
||||
class IssueCommentEventHook(BaseEventHook):
|
||||
def process_event(self):
|
||||
if self.payload.get('object_attributes', {}).get("noteable_type", None) != "Issue":
|
||||
return
|
||||
|
||||
number = self.payload.get('issue', {}).get('iid', None)
|
||||
subject = self.payload.get('issue', {}).get('title', None)
|
||||
|
||||
project_url = self.payload.get('repository', {}).get('homepage', None)
|
||||
user_name = self.payload.get('user', {}).get('username', None)
|
||||
return {
|
||||
"number": self.payload.get('object_attributes', {}).get('iid', None),
|
||||
"subject": self.payload.get('object_attributes', {}).get('title', None),
|
||||
"url": self.payload.get('object_attributes', {}).get('url', None),
|
||||
"user_id": None,
|
||||
"user_name": user_name,
|
||||
"user_url": os.path.join(os.path.dirname(os.path.dirname(project_url)), "u", user_name),
|
||||
"description": self.replace_gitlab_references(project_url, description),
|
||||
}
|
||||
|
||||
gitlab_url = os.path.join(project_url, "issues", str(number))
|
||||
gitlab_user_name = self.payload.get('user', {}).get('username', None)
|
||||
gitlab_user_url = os.path.join(os.path.dirname(os.path.dirname(project_url)), "u", gitlab_user_name)
|
||||
|
||||
class IssueCommentEventHook(BaseGitLabEventHook, BaseIssueCommentEventHook):
|
||||
def ignore(self):
|
||||
return self.payload.get('object_attributes', {}).get("noteable_type", None) != "Issue"
|
||||
|
||||
def get_data(self):
|
||||
comment_message = self.payload.get('object_attributes', {}).get('note', None)
|
||||
comment_message = replace_gitlab_references(project_url, comment_message)
|
||||
project_url = self.payload.get('repository', {}).get('homepage', None)
|
||||
number = self.payload.get('issue', {}).get('iid', None)
|
||||
user_name = self.payload.get('user', {}).get('username', None)
|
||||
return {
|
||||
"number": number,
|
||||
"url": os.path.join(project_url, "issues", str(number)),
|
||||
"user_id": None,
|
||||
"user_name": user_name,
|
||||
"user_url": os.path.join(os.path.dirname(os.path.dirname(project_url)), "u", user_name),
|
||||
"comment_url": self.payload.get('object_attributes', {}).get('url', None),
|
||||
"comment_message": self.replace_gitlab_references(project_url, comment_message),
|
||||
}
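The user and issue URLs above are derived purely with os.path from the repository homepage GitLab sends; a small sketch (hostname, group and user are illustrative, and the joins use os.sep, so this assumes a POSIX host):

import os.path

project_url = "https://gitlab.example.com/some-group/some-repo"   # payload["repository"]["homepage"]
number = 42
user_name = "jdoe"

issue_url = os.path.join(project_url, "issues", str(number))
user_url = os.path.join(os.path.dirname(os.path.dirname(project_url)), "u", user_name)

print(issue_url)   # https://gitlab.example.com/some-group/some-repo/issues/42
print(user_url)    # https://gitlab.example.com/u/jdoe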
|
||||
|
||||
user = get_gitlab_user(None)
|
||||
|
||||
if not all([comment_message, gitlab_url, project_url]):
|
||||
raise ActionSyntaxException(_("Invalid issue comment information"))
|
||||
|
||||
issues = Issue.objects.filter(external_reference=["gitlab", gitlab_url])
|
||||
tasks = Task.objects.filter(external_reference=["gitlab", gitlab_url])
|
||||
uss = UserStory.objects.filter(external_reference=["gitlab", gitlab_url])
|
||||
|
||||
for item in list(issues) + list(tasks) + list(uss):
|
||||
if number and subject and gitlab_user_name and gitlab_user_url:
|
||||
comment = _("Comment by [@{gitlab_user_name}]({gitlab_user_url} "
|
||||
"\"See @{gitlab_user_name}'s GitLab profile\") "
|
||||
"from GitLab.\nOrigin GitLab issue: [gl#{number} - {subject}]({gitlab_url} "
|
||||
"\"Go to 'gl#{number} - {subject}'\")\n\n"
|
||||
"{message}").format(gitlab_user_name=gitlab_user_name,
|
||||
gitlab_user_url=gitlab_user_url,
|
||||
number=number,
|
||||
subject=subject,
|
||||
gitlab_url=gitlab_url,
|
||||
message=comment_message)
|
||||
else:
|
||||
comment = _("Comment From GitLab:\n\n{message}").format(message=comment_message)
|
||||
|
||||
snapshot = take_snapshot(item, comment=comment, user=user)
|
||||
send_notifications(item, history=snapshot)
|
||||
class PushEventHook(BaseGitLabEventHook, BasePushEventHook):
|
||||
def get_data(self):
|
||||
result = []
|
||||
for commit in self.payload.get("commits", []):
|
||||
user_name = commit.get('author', {}).get('name', None)
|
||||
result.append({
|
||||
"user_id": None,
|
||||
"user_name": user_name,
|
||||
"user_url": None,
|
||||
"commit_id": commit.get("id", None),
|
||||
"commit_url": commit.get("url", None),
|
||||
"commit_message": commit.get("message").strip(),
|
||||
})
|
||||
return result
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
|
||||
import uuid
|
||||
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.core.urlresolvers import reverse
|
||||
from django.conf import settings
|
||||
|
||||
|
@ -41,18 +40,3 @@ def get_or_generate_config(project):
|
|||
url = "{}?project={}&key={}".format(url, project.id, g_config["secret"])
|
||||
g_config["webhooks_url"] = url
|
||||
return g_config
|
||||
|
||||
|
||||
def get_gitlab_user(user_email):
|
||||
user = None
|
||||
|
||||
if user_email:
|
||||
try:
|
||||
user = get_user_model().objects.get(email=user_email)
|
||||
except get_user_model().DoesNotExist:
|
||||
pass
|
||||
|
||||
if user is None:
|
||||
user = get_user_model().objects.get(is_system=True, username__startswith="gitlab")
|
||||
|
||||
return user
|
||||
|
|
|
@ -0,0 +1,44 @@
|
|||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from taiga.hooks.api import BaseWebhookApiViewSet
|
||||
|
||||
from . import event_hooks
|
||||
|
||||
|
||||
class GogsViewSet(BaseWebhookApiViewSet):
|
||||
event_hook_classes = {
|
||||
"push": event_hooks.PushEventHook
|
||||
}
|
||||
|
||||
def _validate_signature(self, project, request):
|
||||
payload = self._get_payload(request)
|
||||
|
||||
if not hasattr(project, "modules_config"):
|
||||
return False
|
||||
|
||||
if project.modules_config.config is None:
|
||||
return False
|
||||
|
||||
secret = project.modules_config.config.get("gogs", {}).get("secret", None)
|
||||
if secret is None:
|
||||
return False
|
||||
|
||||
return payload.get('secret', None) == secret
|
||||
|
||||
def _get_event_name(self, request):
|
||||
return "push"
|
|
@ -0,0 +1,52 @@
|
|||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import re
|
||||
import os.path
|
||||
|
||||
from taiga.hooks.event_hooks import BasePushEventHook
|
||||
|
||||
|
||||
class BaseGogsEventHook():
|
||||
platform = "Gogs"
|
||||
platform_slug = "gogs"
|
||||
|
||||
def replace_gogs_references(self, project_url, wiki_text):
|
||||
if wiki_text is None:
|
||||
wiki_text = ""
|
||||
|
||||
template = "\g<1>[Gogs#\g<2>]({}/issues/\g<2>)\g<3>".format(project_url)
|
||||
return re.sub(r"(\s|^)#(\d+)(\s|$)", template, wiki_text, 0, re.M)
|
||||
|
||||
|
||||
class PushEventHook(BaseGogsEventHook, BasePushEventHook):
|
||||
def get_data(self):
|
||||
result = []
|
||||
commits = self.payload.get("commits", [])
|
||||
project_url = self.payload.get("repository", {}).get("html_url", None)
|
||||
|
||||
for commit in filter(None, commits):
|
||||
user_name = commit.get('author', {}).get('username', None)
|
||||
result.append({
|
||||
"user_id": user_name,
|
||||
"user_name": user_name,
|
||||
"user_url": os.path.join(os.path.dirname(os.path.dirname(project_url)), user_name),
|
||||
"commit_id": commit.get("id", None),
|
||||
"commit_url": commit.get("url", None),
|
||||
"commit_message": commit.get("message", None),
|
||||
})
|
||||
return result
|
|
@ -0,0 +1,41 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.db import models, migrations
|
||||
from django.core.files import File
|
||||
|
||||
import uuid
|
||||
import os
|
||||
|
||||
CUR_DIR = os.path.dirname(__file__)
|
||||
|
||||
|
||||
def create_gogs_system_user(apps, schema_editor):
|
||||
# We get the model from the versioned app registry;
|
||||
# if we directly import it, it'll be the wrong version
|
||||
User = apps.get_model("users", "User")
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
if not User.objects.using(db_alias).filter(is_system=True, username__startswith="gogs-").exists():
|
||||
random_hash = uuid.uuid4().hex
|
||||
user = User.objects.using(db_alias).create(
|
||||
username="gogs-{}".format(random_hash),
|
||||
email="gogs-{}@taiga.io".format(random_hash),
|
||||
full_name="Gogs",
|
||||
is_active=False,
|
||||
is_system=True,
|
||||
bio="",
|
||||
)
|
||||
f = open("{}/logo.png".format(CUR_DIR), "rb")
|
||||
user.photo.save("logo.png", File(f))
|
||||
user.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('users', '0010_auto_20150414_0936')
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(create_gogs_system_user),
|
||||
]
|
Binary file not shown (image, 96 KiB).
|
@ -0,0 +1 @@
|
|||
# This file is needed to load migrations
|
|
@ -0,0 +1,37 @@
|
|||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import uuid
|
||||
|
||||
from django.core.urlresolvers import reverse
|
||||
|
||||
from taiga.base.utils.urls import get_absolute_url
|
||||
|
||||
|
||||
# Set this in settings.PROJECT_MODULES_CONFIGURATORS["gogs"]
|
||||
def get_or_generate_config(project):
|
||||
config = project.modules_config.config
|
||||
if config and "gogs" in config:
|
||||
g_config = project.modules_config.config["gogs"]
|
||||
else:
|
||||
g_config = {"secret": uuid.uuid4().hex}
|
||||
|
||||
url = reverse("gogs-hook-list")
|
||||
url = get_absolute_url(url)
|
||||
url = "%s?project=%s" % (url, project.id)
|
||||
g_config["webhooks_url"] = url
|
||||
return g_config
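For reference, a sketch of the dict this helper returns; the secret and host are illustrative, and the path placeholder stands in for whatever reverse("gogs-hook-list") resolves to:

example_g_config = {
    "secret": "9f8b2c0e6a7d4e51b3c2a1d0f4e5b6a7",                      # uuid.uuid4().hex, generated once
    "webhooks_url": "https://taiga.example.com/<gogs-hook-path>?project=42",
}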
|
File diffs suppressed because they are too large (15 files).
|
@ -57,7 +57,9 @@ class TaigaReferencesPattern(Pattern):
|
|||
|
||||
subject = instance.content_object.subject
|
||||
|
||||
if instance.content_type.model == "userstory":
|
||||
if instance.content_type.model == "epic":
|
||||
html_classes = "reference epic"
|
||||
elif instance.content_type.model == "userstory":
|
||||
html_classes = "reference user-story"
|
||||
elif instance.content_type.model == "task":
|
||||
html_classes = "reference task"
|
||||
|
|
|
@ -126,16 +126,42 @@ def render_and_extract(project, text):
|
|||
|
||||
class DiffMatchPatch(diff_match_patch.diff_match_patch):
|
||||
def diff_pretty_html(self, diffs):
|
||||
def _sanitize_text(text):
|
||||
return (text.replace("&", "&").replace("<", "<")
|
||||
.replace(">", ">").replace("\n", "<br />"))
|
||||
|
||||
def _split_long_text(text, idx, size):
|
||||
splited_text = text.split()
|
||||
|
||||
if len(splited_text) > 25:
|
||||
if idx == 0:
|
||||
# The first is (...)text
|
||||
first = ""
|
||||
else:
|
||||
first = " ".join(splited_text[:10])
|
||||
|
||||
if idx != 0 and idx == size - 1:
|
||||
# The last is text(...)
|
||||
last = ""
|
||||
else:
|
||||
last = " ".join(splited_text[-10:])
|
||||
|
||||
return "{}(...){}".format(first, last)
|
||||
return text
|
||||
|
||||
size = len(diffs)
|
||||
html = []
|
||||
for (op, data) in diffs:
|
||||
text = (data.replace("&", "&").replace("<", "<")
|
||||
.replace(">", ">").replace("\n", "<br />"))
|
||||
for idx, (op, data) in enumerate(diffs):
|
||||
if op == self.DIFF_INSERT:
|
||||
html.append("<ins style=\"background:#e6ffe6;\">%s</ins>" % text)
|
||||
text = _sanitize_text(data)
|
||||
html.append("<ins style=\"background:#e6ffe6;\">{}</ins>".format(text))
|
||||
elif op == self.DIFF_DELETE:
|
||||
html.append("<del style=\"background:#ffe6e6;\">%s</del>" % text)
|
||||
text = _sanitize_text(data)
|
||||
html.append("<del style=\"background:#ffe6e6;\">{}</del>".format(text))
|
||||
elif op == self.DIFF_EQUAL:
|
||||
html.append("<span>%s</span>" % text)
|
||||
text = _split_long_text(_sanitize_text(data), idx, size)
|
||||
html.append("<span>{}</span>".format(text))
|
||||
|
||||
return "".join(html)
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,84 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
|
||||
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
|
||||
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
|
||||
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
|
||||
# Copyright (C) 2014-2016 Anler Hernández <hello@anler.me>
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as
|
||||
# published by the Free Software Foundation, either version 3 of the
|
||||
# License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
ANON_PERMISSIONS = [
|
||||
('view_project', _('View project')),
|
||||
('view_milestones', _('View milestones')),
|
||||
('view_epics', _('View epic')),
|
||||
('view_us', _('View user stories')),
|
||||
('view_tasks', _('View tasks')),
|
||||
('view_issues', _('View issues')),
|
||||
('view_wiki_pages', _('View wiki pages')),
|
||||
('view_wiki_links', _('View wiki links')),
|
||||
]
|
||||
|
||||
MEMBERS_PERMISSIONS = [
|
||||
('view_project', _('View project')),
|
||||
# Milestone permissions
|
||||
('view_milestones', _('View milestones')),
|
||||
('add_milestone', _('Add milestone')),
|
||||
('modify_milestone', _('Modify milestone')),
|
||||
('delete_milestone', _('Delete milestone')),
|
||||
# Epic permissions
|
||||
('view_epics', _('View epic')),
|
||||
('add_epic', _('Add epic')),
|
||||
('modify_epic', _('Modify epic')),
|
||||
('comment_epic', _('Comment epic')),
|
||||
('delete_epic', _('Delete epic')),
|
||||
# US permissions
|
||||
('view_us', _('View user story')),
|
||||
('add_us', _('Add user story')),
|
||||
('modify_us', _('Modify user story')),
|
||||
('comment_us', _('Comment user story')),
|
||||
('delete_us', _('Delete user story')),
|
||||
# Task permissions
|
||||
('view_tasks', _('View tasks')),
|
||||
('add_task', _('Add task')),
|
||||
('modify_task', _('Modify task')),
|
||||
('comment_task', _('Comment task')),
|
||||
('delete_task', _('Delete task')),
|
||||
# Issue permissions
|
||||
('view_issues', _('View issues')),
|
||||
('add_issue', _('Add issue')),
|
||||
('modify_issue', _('Modify issue')),
|
||||
('comment_issue', _('Comment issue')),
|
||||
('delete_issue', _('Delete issue')),
|
||||
# Wiki page permissions
|
||||
('view_wiki_pages', _('View wiki pages')),
|
||||
('add_wiki_page', _('Add wiki page')),
|
||||
('modify_wiki_page', _('Modify wiki page')),
|
||||
('comment_wiki_page', _('Comment wiki page')),
|
||||
('delete_wiki_page', _('Delete wiki page')),
|
||||
# Wiki link permissions
|
||||
('view_wiki_links', _('View wiki links')),
|
||||
('add_wiki_link', _('Add wiki link')),
|
||||
('modify_wiki_link', _('Modify wiki link')),
|
||||
('delete_wiki_link', _('Delete wiki link')),
|
||||
]
|
||||
|
||||
ADMINS_PERMISSIONS = [
|
||||
('modify_project', _('Modify project')),
|
||||
('delete_project', _('Delete project')),
|
||||
('add_member', _('Add member')),
|
||||
('remove_member', _('Remove member')),
|
||||
('admin_project_values', _('Admin project values')),
|
||||
('admin_roles', _('Admin roles')),
|
||||
]
|
|
@ -17,77 +17,75 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.apps import apps
|
||||
|
||||
ANON_PERMISSIONS = [
|
||||
('view_project', _('View project')),
|
||||
('view_milestones', _('View milestones')),
|
||||
('view_us', _('View user stories')),
|
||||
('view_tasks', _('View tasks')),
|
||||
('view_issues', _('View issues')),
|
||||
('view_wiki_pages', _('View wiki pages')),
|
||||
('view_wiki_links', _('View wiki links')),
|
||||
]
|
||||
from taiga.base.api.permissions import PermissionComponent
|
||||
|
||||
USER_PERMISSIONS = [
|
||||
('view_project', _('View project')),
|
||||
('view_milestones', _('View milestones')),
|
||||
('view_us', _('View user stories')),
|
||||
('view_issues', _('View issues')),
|
||||
('view_tasks', _('View tasks')),
|
||||
('view_wiki_pages', _('View wiki pages')),
|
||||
('view_wiki_links', _('View wiki links')),
|
||||
('request_membership', _('Request membership')),
|
||||
('add_us_to_project', _('Add user story to project')),
|
||||
('add_comments_to_us', _('Add comments to user stories')),
|
||||
('add_comments_to_task', _('Add comments to tasks')),
|
||||
('add_issue', _('Add issues')),
|
||||
('add_comments_to_issue', _('Add comments to issues')),
|
||||
('add_wiki_page', _('Add wiki page')),
|
||||
('modify_wiki_page', _('Modify wiki page')),
|
||||
('add_wiki_link', _('Add wiki link')),
|
||||
('modify_wiki_link', _('Modify wiki link')),
|
||||
]
|
||||
from . import services
|
||||
|
||||
MEMBERS_PERMISSIONS = [
|
||||
('view_project', _('View project')),
|
||||
# Milestone permissions
|
||||
('view_milestones', _('View milestones')),
|
||||
('add_milestone', _('Add milestone')),
|
||||
('modify_milestone', _('Modify milestone')),
|
||||
('delete_milestone', _('Delete milestone')),
|
||||
# US permissions
|
||||
('view_us', _('View user story')),
|
||||
('add_us', _('Add user story')),
|
||||
('modify_us', _('Modify user story')),
|
||||
('delete_us', _('Delete user story')),
|
||||
# Task permissions
|
||||
('view_tasks', _('View tasks')),
|
||||
('add_task', _('Add task')),
|
||||
('modify_task', _('Modify task')),
|
||||
('delete_task', _('Delete task')),
|
||||
# Issue permissions
|
||||
('view_issues', _('View issues')),
|
||||
('add_issue', _('Add issue')),
|
||||
('modify_issue', _('Modify issue')),
|
||||
('delete_issue', _('Delete issue')),
|
||||
# Wiki page permissions
|
||||
('view_wiki_pages', _('View wiki pages')),
|
||||
('add_wiki_page', _('Add wiki page')),
|
||||
('modify_wiki_page', _('Modify wiki page')),
|
||||
('delete_wiki_page', _('Delete wiki page')),
|
||||
# Wiki link permissions
|
||||
('view_wiki_links', _('View wiki links')),
|
||||
('add_wiki_link', _('Add wiki link')),
|
||||
('modify_wiki_link', _('Modify wiki link')),
|
||||
('delete_wiki_link', _('Delete wiki link')),
|
||||
]
|
||||
|
||||
ADMINS_PERMISSIONS = [
|
||||
('modify_project', _('Modify project')),
|
||||
('add_member', _('Add member')),
|
||||
('remove_member', _('Remove member')),
|
||||
('delete_project', _('Delete project')),
|
||||
('admin_project_values', _('Admin project values')),
|
||||
('admin_roles', _('Admin roles')),
|
||||
]
|
||||
######################################################################
|
||||
# Generic perms
|
||||
######################################################################
|
||||
|
||||
class HasProjectPerm(PermissionComponent):
|
||||
def __init__(self, perm, *components):
|
||||
self.project_perm = perm
|
||||
super().__init__(*components)
|
||||
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
return services.user_has_perm(request.user, self.project_perm, obj)
|
||||
|
||||
|
||||
class IsObjectOwner(PermissionComponent):
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
if obj.owner is None:
|
||||
return False
|
||||
|
||||
return obj.owner == request.user
|
||||
|
||||
|
||||
######################################################################
|
||||
# Project Perms
|
||||
######################################################################
|
||||
|
||||
class IsProjectAdmin(PermissionComponent):
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
return services.is_project_admin(request.user, obj)
|
||||
|
||||
|
||||
######################################################################
|
||||
# Common perms for stories, tasks and issues
|
||||
######################################################################
|
||||
|
||||
class CommentAndOrUpdatePerm(PermissionComponent):
|
||||
def __init__(self, update_perm, comment_perm, *components):
|
||||
self.update_perm = update_perm
|
||||
self.comment_perm = comment_perm
|
||||
super().__init__(*components)
|
||||
|
||||
def check_permissions(self, request, view, obj=None):
|
||||
if not obj:
|
||||
return False
|
||||
|
||||
project_id = request.DATA.get('project', None)
|
||||
if project_id and obj.project_id != project_id:
|
||||
project = apps.get_model("projects", "Project").objects.get(pk=project_id)
|
||||
else:
|
||||
project = obj.project
|
||||
|
||||
data_keys = request.DATA.keys()
|
||||
|
||||
if (not services.user_has_perm(request.user, self.comment_perm, project) and
|
||||
"comment" in data_keys):
|
||||
# User can't comment but there is a comment in the request
|
||||
#raise exc.PermissionDenied(_("You don't have permissions to comment this."))
|
||||
return False
|
||||
|
||||
if (not services.user_has_perm(request.user, self.update_perm, project) and
|
||||
len(data_keys - "comment")):
|
||||
# User can't update but there is a change in the request
|
||||
#raise exc.PermissionDenied(_("You don't have permissions to update this."))
|
||||
return False
|
||||
|
||||
return True
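A simplified standalone sketch of the rule this component enforces, using the user-story slugs from the permission lists earlier in this diff ("modify_us", "comment_us"); the helper name is hypothetical:

def can_write(user_perms, data_keys, update_perm="modify_us", comment_perm="comment_us"):
    if "comment" in data_keys and comment_perm not in user_perms:
        return False   # a comment is being sent but the user may not comment
    if (set(data_keys) - {"comment"}) and update_perm not in user_perms:
        return False   # other fields change but the user may not modify
    return True

print(can_write({"comment_us"}, {"comment"}))             # True: commenting only
print(can_write({"comment_us"}, {"comment", "subject"}))  # False: needs modify_us too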
|
||||
|
|
|
@ -17,10 +17,11 @@
|
|||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from .permissions import ADMINS_PERMISSIONS, MEMBERS_PERMISSIONS, ANON_PERMISSIONS, USER_PERMISSIONS
|
||||
from .choices import ADMINS_PERMISSIONS, MEMBERS_PERMISSIONS, ANON_PERMISSIONS
|
||||
|
||||
from django.apps import apps
|
||||
|
||||
|
||||
def _get_user_project_membership(user, project, cache="user"):
|
||||
"""
|
||||
cache param determines how memberships are calculated trying to reuse the existing data
|
||||
|
@ -77,58 +78,69 @@ def user_has_perm(user, perm, obj=None, cache="user"):
|
|||
in cache
|
||||
"""
|
||||
project = _get_object_project(obj)
|
||||
|
||||
if not project:
|
||||
return False
|
||||
|
||||
return perm in get_user_project_permissions(user, project, cache=cache)
|
||||
|
||||
|
||||
def role_has_perm(role, perm):
|
||||
return perm in role.permissions
|
||||
|
||||
|
||||
def _get_membership_permissions(membership):
|
||||
if membership and membership.role and membership.role.permissions:
|
||||
return membership.role.permissions
|
||||
return []
|
||||
|
||||
|
||||
def get_user_project_permissions(user, project, cache="user"):
|
||||
"""
|
||||
cache param determines how memberships are calculated trying to reuse the existing data
|
||||
in cache
|
||||
"""
|
||||
membership = _get_user_project_membership(user, project, cache=cache)
|
||||
if user.is_superuser:
|
||||
def calculate_permissions(is_authenticated=False, is_superuser=False, is_member=False,
|
||||
is_admin=False, role_permissions=[], anon_permissions=[],
|
||||
public_permissions=[]):
|
||||
if is_superuser:
|
||||
admins_permissions = list(map(lambda perm: perm[0], ADMINS_PERMISSIONS))
|
||||
members_permissions = list(map(lambda perm: perm[0], MEMBERS_PERMISSIONS))
|
||||
public_permissions = list(map(lambda perm: perm[0], USER_PERMISSIONS))
|
||||
public_permissions = []
|
||||
anon_permissions = list(map(lambda perm: perm[0], ANON_PERMISSIONS))
|
||||
elif membership:
|
||||
if membership.is_admin:
|
||||
elif is_member:
|
||||
if is_admin:
|
||||
admins_permissions = list(map(lambda perm: perm[0], ADMINS_PERMISSIONS))
|
||||
members_permissions = list(map(lambda perm: perm[0], MEMBERS_PERMISSIONS))
|
||||
else:
|
||||
admins_permissions = []
|
||||
members_permissions = []
|
||||
members_permissions = members_permissions + _get_membership_permissions(membership)
|
||||
public_permissions = project.public_permissions if project.public_permissions is not None else []
|
||||
anon_permissions = project.anon_permissions if project.anon_permissions is not None else []
|
||||
elif user.is_authenticated():
|
||||
members_permissions = members_permissions + role_permissions
|
||||
public_permissions = public_permissions if public_permissions is not None else []
|
||||
anon_permissions = anon_permissions if anon_permissions is not None else []
|
||||
elif is_authenticated:
|
||||
admins_permissions = []
|
||||
members_permissions = []
|
||||
public_permissions = project.public_permissions if project.public_permissions is not None else []
|
||||
anon_permissions = project.anon_permissions if project.anon_permissions is not None else []
|
||||
public_permissions = public_permissions if public_permissions is not None else []
|
||||
anon_permissions = anon_permissions if anon_permissions is not None else []
|
||||
else:
|
||||
admins_permissions = []
|
||||
members_permissions = []
|
||||
public_permissions = []
|
||||
anon_permissions = project.anon_permissions if project.anon_permissions is not None else []
|
||||
anon_permissions = anon_permissions if anon_permissions is not None else []
|
||||
|
||||
return set(admins_permissions + members_permissions + public_permissions + anon_permissions)
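An illustrative call to the helper above (the slugs come from the permission lists earlier in this diff):

perms = calculate_permissions(
    is_authenticated=True,
    is_superuser=False,
    is_member=True,
    is_admin=False,
    role_permissions=["view_us", "add_us", "modify_us"],
    anon_permissions=["view_project"],
    public_permissions=["view_project", "view_us"],
)
print(sorted(perms))   # ['add_us', 'modify_us', 'view_project', 'view_us']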
|
||||
|
||||
|
||||
def get_user_project_permissions(user, project, cache="user"):
|
||||
"""
|
||||
cache param determines how memberships are calculated trying to reuse the existing data
|
||||
in cache
|
||||
"""
|
||||
membership = _get_user_project_membership(user, project, cache=cache)
|
||||
is_member = membership is not None
|
||||
is_admin = is_member and membership.is_admin
|
||||
return calculate_permissions(
|
||||
is_authenticated = user.is_authenticated(),
|
||||
is_superuser = user.is_superuser,
|
||||
is_member = is_member,
|
||||
is_admin = is_admin,
|
||||
role_permissions = _get_membership_permissions(membership),
|
||||
anon_permissions = project.anon_permissions,
|
||||
public_permissions = project.public_permissions
|
||||
)
|
||||
|
||||
|
||||
def set_base_permissions_for_project(project):
|
||||
if project.is_private:
|
||||
project.anon_permissions = []
|
|
@ -35,6 +35,9 @@ class MembershipAdmin(admin.ModelAdmin):
|
|||
list_display_links = list_display
|
||||
raw_id_fields = ["project"]
|
||||
|
||||
def has_add_permission(self, request):
|
||||
return False
|
||||
|
||||
def get_object(self, *args, **kwargs):
|
||||
self.obj = super().get_object(*args, **kwargs)
|
||||
return self.obj
|
||||
|
@ -103,8 +106,7 @@ class ProjectAdmin(admin.ModelAdmin):
|
|||
(_("Extra info"), {
|
||||
"classes": ("collapse",),
|
||||
"fields": ("creation_template",
|
||||
("is_looking_for_people", "looking_for_people_note"),
|
||||
"tags_colors"),
|
||||
("is_looking_for_people", "looking_for_people_note")),
|
||||
}),
|
||||
(_("Modules"), {
|
||||
"classes": ("collapse",),
|
||||
|
|
|
@ -22,59 +22,58 @@ from dateutil.relativedelta import relativedelta
|
|||
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.db.models import signals, Prefetch
|
||||
from django.db.models import Value as V
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.http import Http404
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.utils import timezone
|
||||
from django.http import Http404
|
||||
|
||||
from django_pglocks import advisory_lock
|
||||
|
||||
from taiga.base import filters
|
||||
from taiga.base import response
|
||||
from taiga.base import exceptions as exc
|
||||
from taiga.base.decorators import list_route
|
||||
from taiga.base.decorators import detail_route
|
||||
from taiga.base import response
|
||||
from taiga.base.api import ModelCrudViewSet, ModelListViewSet
|
||||
from taiga.base.api.mixins import BlockedByProjectMixin, BlockeableSaveMixin, BlockeableDeleteMixin
|
||||
from taiga.base.api.permissions import AllowAnyPermission
|
||||
from taiga.base.api.utils import get_object_or_404
|
||||
from taiga.base.decorators import list_route
|
||||
from taiga.base.decorators import detail_route
|
||||
from taiga.base.utils.slug import slugify_uniquely
|
||||
|
||||
from taiga.permissions import services as permissions_services
|
||||
|
||||
from taiga.projects.epics.models import Epic
|
||||
from taiga.projects.history.mixins import HistoryResourceMixin
|
||||
from taiga.projects.notifications.models import NotifyPolicy
|
||||
from taiga.projects.notifications.mixins import WatchedResourceMixin, WatchersViewSetMixin
|
||||
from taiga.projects.notifications.choices import NotifyLevel
|
||||
|
||||
from taiga.projects.mixins.ordering import BulkUpdateOrderMixin
|
||||
from taiga.projects.mixins.on_destroy import MoveOnDestroyMixin
|
||||
|
||||
from taiga.projects.userstories.models import UserStory, RolePoints
|
||||
from taiga.projects.tasks.models import Task
|
||||
from taiga.projects.issues.models import Issue
|
||||
from taiga.projects.likes.mixins.viewsets import LikedResourceMixin, FansViewSetMixin
|
||||
from taiga.permissions import service as permissions_service
|
||||
from taiga.users import services as users_service
|
||||
from taiga.projects.notifications.mixins import WatchersViewSetMixin
|
||||
from taiga.projects.notifications.choices import NotifyLevel
|
||||
from taiga.projects.mixins.on_destroy import MoveOnDestroyMixin
|
||||
from taiga.projects.mixins.ordering import BulkUpdateOrderMixin
|
||||
from taiga.projects.tasks.models import Task
|
||||
from taiga.projects.tagging.api import TagsColorsResourceMixin
|
||||
from taiga.projects.userstories.models import UserStory, RolePoints
|
||||
|
||||
from . import filters as project_filters
|
||||
from . import models
|
||||
from . import permissions
|
||||
from . import serializers
|
||||
from . import validators
|
||||
from . import services
|
||||
|
||||
from . import utils as project_utils
|
||||
|
||||
######################################################
|
||||
## Project
|
||||
# Project
|
||||
######################################################
|
||||
|
||||
|
||||
class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
||||
BlockeableSaveMixin, BlockeableDeleteMixin, ModelCrudViewSet):
|
||||
|
||||
BlockeableSaveMixin, BlockeableDeleteMixin,
|
||||
TagsColorsResourceMixin, ModelCrudViewSet):
|
||||
validator_class = validators.ProjectValidator
|
||||
queryset = models.Project.objects.all()
|
||||
serializer_class = serializers.ProjectDetailSerializer
|
||||
admin_serializer_class = serializers.ProjectDetailAdminSerializer
|
||||
list_serializer_class = serializers.ProjectSerializer
|
||||
permission_classes = (permissions.ProjectPermission, )
|
||||
filter_backends = (project_filters.QFilterBackend,
|
||||
filter_backends = (project_filters.UserOrderFilterBackend,
|
||||
project_filters.QFilterBackend,
|
||||
project_filters.CanViewProjectObjFilterBackend,
|
||||
project_filters.DiscoverModeFilterBackend)
|
||||
|
||||
|
@ -85,8 +84,7 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
|||
"is_kanban_activated")
|
||||
|
||||
ordering = ("name", "id")
|
||||
order_by_fields = ("memberships__user_order",
|
||||
"total_fans",
|
||||
order_by_fields = ("total_fans",
|
||||
"total_fans_last_week",
|
||||
"total_fans_last_month",
|
||||
"total_fans_last_year",
|
||||
|
@ -106,53 +104,38 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
|||
|
||||
def get_queryset(self):
|
||||
qs = super().get_queryset()
|
||||
|
||||
qs = qs.select_related("owner")
|
||||
# Prefetch doesn"t work correctly if then if the field is filtered later (it generates more queries)
|
||||
# so we add some custom prefetching
|
||||
qs = qs.prefetch_related("members")
|
||||
qs = qs.prefetch_related("memberships")
|
||||
qs = qs.prefetch_related(Prefetch("notify_policies",
|
||||
NotifyPolicy.objects.exclude(notify_level=NotifyLevel.none), to_attr="valid_notify_policies"))
|
||||
|
||||
Milestone = apps.get_model("milestones", "Milestone")
|
||||
qs = qs.prefetch_related(Prefetch("milestones",
|
||||
Milestone.objects.filter(closed=True), to_attr="closed_milestones"))
|
||||
qs = project_utils.attach_extra_info(qs, user=self.request.user)
|
||||
|
||||
# If filtering an activity period we must exclude the activities not updated recently enough
|
||||
now = timezone.now()
|
||||
order_by_field_name = self._get_order_by_field_name()
|
||||
if order_by_field_name == "total_fans_last_week":
|
||||
qs = qs.filter(totals_updated_datetime__gte=now-relativedelta(weeks=1))
|
||||
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(weeks=1))
|
||||
elif order_by_field_name == "total_fans_last_month":
|
||||
qs = qs.filter(totals_updated_datetime__gte=now-relativedelta(months=1))
|
||||
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(months=1))
|
||||
elif order_by_field_name == "total_fans_last_year":
|
||||
qs = qs.filter(totals_updated_datetime__gte=now-relativedelta(years=1))
|
||||
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(years=1))
|
||||
elif order_by_field_name == "total_activity_last_week":
|
||||
qs = qs.filter(totals_updated_datetime__gte=now-relativedelta(weeks=1))
|
||||
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(weeks=1))
|
||||
elif order_by_field_name == "total_activity_last_month":
|
||||
qs = qs.filter(totals_updated_datetime__gte=now-relativedelta(months=1))
|
||||
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(months=1))
|
||||
elif order_by_field_name == "total_activity_last_year":
|
||||
qs = qs.filter(totals_updated_datetime__gte=now-relativedelta(years=1))
|
||||
qs = qs.filter(totals_updated_datetime__gte=now - relativedelta(years=1))
|
||||
|
||||
return qs
|
||||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
if self.action == "by_slug":
|
||||
self.lookup_field = "slug"
|
||||
|
||||
return super().retrieve(request, *args, **kwargs)
|
||||
|
||||
def get_serializer_class(self):
|
||||
serializer_class = self.serializer_class
|
||||
|
||||
if self.action == "list":
|
||||
serializer_class = self.list_serializer_class
|
||||
elif self.action != "create":
|
||||
if self.action == "by_slug":
|
||||
slug = self.request.QUERY_PARAMS.get("slug", None)
|
||||
project = get_object_or_404(models.Project, slug=slug)
|
||||
else:
|
||||
project = self.get_object()
|
||||
return serializers.ProjectSerializer
|
||||
|
||||
if permissions_service.is_project_admin(self.request.user, project):
|
||||
serializer_class = self.admin_serializer_class
|
||||
|
||||
return serializer_class
|
||||
return serializers.ProjectDetailSerializer
|
||||
|
||||
@detail_route(methods=["POST"])
|
||||
def change_logo(self, request, *args, **kwargs):
|
||||
|
@ -215,11 +198,11 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
|||
if self.request.user.is_anonymous():
|
||||
return response.Unauthorized()
|
||||
|
||||
serializer = serializers.UpdateProjectOrderBulkSerializer(data=request.DATA, many=True)
|
||||
if not serializer.is_valid():
|
||||
return response.BadRequest(serializer.errors)
|
||||
validator = validators.UpdateProjectOrderBulkValidator(data=request.DATA, many=True)
|
||||
if not validator.is_valid():
|
||||
return response.BadRequest(validator.errors)
|
||||
|
||||
data = serializer.data
|
||||
data = validator.data
|
||||
services.update_projects_order_in_bulk(data, "user_order", request.user)
|
||||
return response.NoContent(data=None)
|
||||
|
||||
|
@ -234,20 +217,22 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
|||
if not template_description:
|
||||
raise response.BadRequest(_("Not valid template description"))
|
||||
|
||||
template_slug = slugify_uniquely(template_name, models.ProjectTemplate)
|
||||
with advisory_lock("create-project-template") as acquired_key_lock:
|
||||
template_slug = slugify_uniquely(template_name, models.ProjectTemplate)
|
||||
|
||||
project = self.get_object()
|
||||
project = self.get_object()
|
||||
|
||||
self.check_permissions(request, 'create_template', project)
|
||||
self.check_permissions(request, 'create_template', project)
|
||||
|
||||
template = models.ProjectTemplate(
|
||||
name=template_name,
|
||||
slug=template_slug,
|
||||
description=template_description,
|
||||
)
|
||||
template = models.ProjectTemplate(
|
||||
name=template_name,
|
||||
slug=template_slug,
|
||||
description=template_description,
|
||||
)
|
||||
|
||||
template.load_data_from_project(project)
|
||||
template.save()
|
||||
template.load_data_from_project(project)
|
||||
|
||||
template.save()
|
||||
return response.Created(serializers.ProjectTemplateSerializer(template).data)
|
||||
|
||||
@detail_route(methods=['POST'])
|
||||
|
@ -258,6 +243,20 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
|||
services.remove_user_from_project(request.user, project)
|
||||
return response.Ok()
|
||||
|
||||
def _regenerate_csv_uuid(self, project, field):
|
||||
uuid_value = uuid.uuid4().hex
|
||||
setattr(project, field, uuid_value)
|
||||
project.save()
|
||||
return uuid_value
|
||||
|
||||
@detail_route(methods=["POST"])
|
||||
def regenerate_epics_csv_uuid(self, request, pk=None):
|
||||
project = self.get_object()
|
||||
self.check_permissions(request, "regenerate_epics_csv_uuid", project)
|
||||
self.pre_conditions_on_save(project)
|
||||
data = {"uuid": self._regenerate_csv_uuid(project, "epics_csv_uuid")}
|
||||
return response.Ok(data)
|
||||
|
||||
@detail_route(methods=["POST"])
|
||||
def regenerate_userstories_csv_uuid(self, request, pk=None):
|
||||
project = self.get_object()
|
||||
|
@ -266,14 +265,6 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
|||
data = {"uuid": self._regenerate_csv_uuid(project, "userstories_csv_uuid")}
|
||||
return response.Ok(data)
|
||||
|
||||
@detail_route(methods=["POST"])
|
||||
def regenerate_issues_csv_uuid(self, request, pk=None):
|
||||
project = self.get_object()
|
||||
self.check_permissions(request, "regenerate_issues_csv_uuid", project)
|
||||
self.pre_conditions_on_save(project)
|
||||
data = {"uuid": self._regenerate_csv_uuid(project, "issues_csv_uuid")}
|
||||
return response.Ok(data)
|
||||
|
||||
@detail_route(methods=["POST"])
|
||||
def regenerate_tasks_csv_uuid(self, request, pk=None):
|
||||
project = self.get_object()
|
||||
|
@ -282,11 +273,18 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
|
|||
data = {"uuid": self._regenerate_csv_uuid(project, "tasks_csv_uuid")}
|
||||
return response.Ok(data)
|
||||
|
||||
@detail_route(methods=["POST"])
|
||||
def regenerate_issues_csv_uuid(self, request, pk=None):
|
||||
project = self.get_object()
|
||||
self.check_permissions(request, "regenerate_issues_csv_uuid", project)
|
||||
self.pre_conditions_on_save(project)
|
||||
data = {"uuid": self._regenerate_csv_uuid(project, "issues_csv_uuid")}
|
||||
return response.Ok(data)
|
||||
|
||||
@list_route(methods=["GET"])
|
||||
def by_slug(self, request):
|
||||
def by_slug(self, request, *args, **kwargs):
|
||||
slug = request.QUERY_PARAMS.get("slug", None)
|
||||
project = get_object_or_404(models.Project, slug=slug)
|
||||
return self.retrieve(request, pk=project.pk)
|
||||
return self.retrieve(request, slug=slug)
 
     @detail_route(methods=["GET", "PATCH"])
     def modules(self, request, pk=None):
@@ -309,12 +307,6 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
         self.check_permissions(request, "stats", project)
         return response.Ok(services.get_stats_for_project(project))
 
-    def _regenerate_csv_uuid(self, project, field):
-        uuid_value = uuid.uuid4().hex
-        setattr(project, field, uuid_value)
-        project.save()
-        return uuid_value
-
     @detail_route(methods=["GET"])
     def member_stats(self, request, pk=None):
         project = self.get_object()
@@ -327,12 +319,6 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
         self.check_permissions(request, "issues_stats", project)
         return response.Ok(services.get_stats_for_project_issues(project))
 
-    @detail_route(methods=["GET"])
-    def tags_colors(self, request, pk=None):
-        project = self.get_object()
-        self.check_permissions(request, "tags_colors", project)
-        return response.Ok(dict(project.tags_colors))
-
     @detail_route(methods=["POST"])
     def transfer_validate_token(self, request, pk=None):
         project = self.get_object()
@@ -368,7 +354,7 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
             return response.BadRequest(_("The user must be already a project member"))
 
         reason = request.DATA.get('reason', None)
-        transfer_token = services.start_project_transfer(project, user, reason)
+        services.start_project_transfer(project, user, reason)
         return response.Ok()
 
     @detail_route(methods=["POST"])
@@ -405,6 +391,10 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
         services.reject_project_transfer(project, request.user, token, reason)
         return response.Ok()
 
+    def _raise_if_blocked(self, project):
+        if self.is_blocked(project):
+            raise exc.Blocked(_("Blocked element"))
+
     def _set_base_permissions(self, obj):
         update_permissions = False
         if not obj.id:
@@ -417,7 +407,7 @@ class ProjectViewSet(LikedResourceMixin, HistoryResourceMixin,
             update_permissions = True
 
         if update_permissions:
-            permissions_service.set_base_permissions_for_project(obj)
+            permissions_services.set_base_permissions_for_project(obj)
 
     def pre_save(self, obj):
         if not obj.id:
@@ -468,20 +458,21 @@ class ProjectWatchersViewSet(WatchersViewSetMixin, ModelListViewSet):
 ## Custom values for selectors
 ######################################################
 
-class PointsViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
-                    ModelCrudViewSet, BulkUpdateOrderMixin):
+class EpicStatusViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
+                        ModelCrudViewSet, BulkUpdateOrderMixin):
 
-    model = models.Points
-    serializer_class = serializers.PointsSerializer
-    permission_classes = (permissions.PointsPermission,)
+    model = models.EpicStatus
+    serializer_class = serializers.EpicStatusSerializer
+    validator_class = validators.EpicStatusValidator
+    permission_classes = (permissions.EpicStatusPermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ('project',)
-    bulk_update_param = "bulk_points"
-    bulk_update_perm = "change_points"
-    bulk_update_order_action = services.bulk_update_points_order
-    move_on_destroy_related_class = RolePoints
-    move_on_destroy_related_field = "points"
-    move_on_destroy_project_default_field = "default_points"
+    bulk_update_param = "bulk_epic_statuses"
+    bulk_update_perm = "change_epicstatus"
+    bulk_update_order_action = services.bulk_update_epic_status_order
+    move_on_destroy_related_class = Epic
+    move_on_destroy_related_field = "status"
+    move_on_destroy_project_default_field = "default_epic_status"
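`BulkUpdateOrderMixin` turns the three `bulk_update_*` attributes into a bulk-reordering action, and the `move_on_destroy_*` attributes let `MoveOnDestroyMixin` move epics onto a replacement status when one is deleted. A sketch of the payload the new epic-status ordering action accepts, shown below; the `[id, order]` pair format and the URL are assumptions, only the field name comes from `bulk_update_param` above:

```python
# Hypothetical payload for the bulk ordering action exposed through
# BulkUpdateOrderMixin; the pair format and the endpoint URL are assumptions.
payload = {
    "project": 42,
    "bulk_epic_statuses": [
        [7, 1],   # [epic_status_id, new_order]
        [3, 2],
        [9, 3],
    ],
}
# e.g. POST /api/v1/epic-statuses/bulk_update_order with this JSON body.
```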
 
 
 class UserStoryStatusViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
@@ -489,6 +480,7 @@ class UserStoryStatusViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
 
     model = models.UserStoryStatus
     serializer_class = serializers.UserStoryStatusSerializer
+    validator_class = validators.UserStoryStatusValidator
     permission_classes = (permissions.UserStoryStatusPermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ('project',)
@@ -500,11 +492,29 @@ class UserStoryStatusViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
     move_on_destroy_project_default_field = "default_us_status"
 
 
+class PointsViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
+                    ModelCrudViewSet, BulkUpdateOrderMixin):
+
+    model = models.Points
+    serializer_class = serializers.PointsSerializer
+    validator_class = validators.PointsValidator
+    permission_classes = (permissions.PointsPermission,)
+    filter_backends = (filters.CanViewProjectFilterBackend,)
+    filter_fields = ('project',)
+    bulk_update_param = "bulk_points"
+    bulk_update_perm = "change_points"
+    bulk_update_order_action = services.bulk_update_points_order
+    move_on_destroy_related_class = RolePoints
+    move_on_destroy_related_field = "points"
+    move_on_destroy_project_default_field = "default_points"
+
+
 class TaskStatusViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
                         ModelCrudViewSet, BulkUpdateOrderMixin):
 
     model = models.TaskStatus
     serializer_class = serializers.TaskStatusSerializer
+    validator_class = validators.TaskStatusValidator
     permission_classes = (permissions.TaskStatusPermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ("project",)
@@ -521,6 +531,7 @@ class SeverityViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
 
     model = models.Severity
     serializer_class = serializers.SeveritySerializer
+    validator_class = validators.SeverityValidator
     permission_classes = (permissions.SeverityPermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ("project",)
@@ -536,6 +547,7 @@ class PriorityViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
                       ModelCrudViewSet, BulkUpdateOrderMixin):
     model = models.Priority
     serializer_class = serializers.PrioritySerializer
+    validator_class = validators.PriorityValidator
     permission_classes = (permissions.PriorityPermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ("project",)
@@ -551,6 +563,7 @@ class IssueTypeViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
                        ModelCrudViewSet, BulkUpdateOrderMixin):
     model = models.IssueType
     serializer_class = serializers.IssueTypeSerializer
+    validator_class = validators.IssueTypeValidator
     permission_classes = (permissions.IssueTypePermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ("project",)
@@ -566,6 +579,7 @@ class IssueStatusViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
                          ModelCrudViewSet, BulkUpdateOrderMixin):
     model = models.IssueStatus
     serializer_class = serializers.IssueStatusSerializer
+    validator_class = validators.IssueStatusValidator
     permission_classes = (permissions.IssueStatusPermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ("project",)
@@ -584,6 +598,7 @@ class IssueStatusViewSet(MoveOnDestroyMixin, BlockedByProjectMixin,
 class ProjectTemplateViewSet(ModelCrudViewSet):
     model = models.ProjectTemplate
     serializer_class = serializers.ProjectTemplateSerializer
+    validator_class = validators.ProjectTemplateValidator
     permission_classes = (permissions.ProjectTemplatePermission,)
 
     def get_queryset(self):
@@ -597,7 +612,9 @@ class ProjectTemplateViewSet(ModelCrudViewSet):
 class MembershipViewSet(BlockedByProjectMixin, ModelCrudViewSet):
     model = models.Membership
     admin_serializer_class = serializers.MembershipAdminSerializer
+    admin_validator_class = validators.MembershipAdminValidator
     serializer_class = serializers.MembershipSerializer
+    validator_class = validators.MembershipValidator
     permission_classes = (permissions.MembershipPermission,)
     filter_backends = (filters.CanViewProjectFilterBackend,)
     filter_fields = ("project", "role")
@@ -609,12 +626,12 @@ class MembershipViewSet(BlockedByProjectMixin, ModelCrudViewSet):
             use_admin_serializer = True
 
         if self.action == "retrieve":
-            use_admin_serializer = permissions_service.is_project_admin(self.request.user, self.object.project)
+            use_admin_serializer = permissions_services.is_project_admin(self.request.user, self.object.project)
 
         project_id = self.request.QUERY_PARAMS.get("project", None)
         if self.action == "list" and project_id is not None:
             project = get_object_or_404(models.Project, pk=project_id)
-            use_admin_serializer = permissions_service.is_project_admin(self.request.user, project)
+            use_admin_serializer = permissions_services.is_project_admin(self.request.user, project)
 
         if use_admin_serializer:
             return self.admin_serializer_class
@@ -622,6 +639,12 @@ class MembershipViewSet(BlockedByProjectMixin, ModelCrudViewSet):
         else:
             return self.serializer_class
 
+    def get_validator_class(self):
+        if self.action == "create":
+            return self.admin_validator_class
+
+        return self.validator_class
+
     def _check_if_project_can_have_more_memberships(self, project, total_new_memberships):
         (can_add_memberships, error_type) = services.check_if_project_can_have_more_memberships(
             project,
@@ -636,11 +659,11 @@ class MembershipViewSet(BlockedByProjectMixin, ModelCrudViewSet):
 
     @list_route(methods=["POST"])
     def bulk_create(self, request, **kwargs):
-        serializer = serializers.MembersBulkSerializer(data=request.DATA)
-        if not serializer.is_valid():
-            return response.BadRequest(serializer.errors)
+        validator = validators.MembersBulkValidator(data=request.DATA)
+        if not validator.is_valid():
+            return response.BadRequest(validator.errors)
 
-        data = serializer.data
+        data = validator.data
         project = models.Project.objects.get(id=data["project_id"])
         invitation_extra_text = data.get("invitation_extra_text", None)
         self.check_permissions(request, 'bulk_create', project)
@@ -657,7 +680,7 @@ class MembershipViewSet(BlockedByProjectMixin, ModelCrudViewSet):
                 invitation_extra_text=invitation_extra_text,
                 callback=self.post_save,
                 precall=self.pre_save)
-        except ValidationError as err:
+        except exc.ValidationError as err:
             return response.BadRequest(err.message_dict)
 
         members_serialized = self.admin_serializer_class(members, many=True)
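`bulk_create` now validates the request body with `MembersBulkValidator` and keeps the admin serializer only for the response. A sketch of the kind of payload it expects; the exact field names live in the validator and are assumed here, not read from this diff:

```python
# Hypothetical request body for POST /api/v1/memberships/bulk_create.
# Field names are assumptions based on the validator/serializer names above.
bulk_invite = {
    "project_id": 42,
    "invitation_extra_text": "Welcome aboard!",
    "bulk_memberships": [
        {"role_id": 3, "username": "ada@example.com"},
        {"role_id": 5, "username": "grace@example.com"},
    ],
}
```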
@@ -25,18 +25,16 @@ from django.db.models import signals
 
 def connect_projects_signals():
     from . import signals as handlers
+    from .tagging import signals as tagging_handlers
     # Apply the project template when a project object is created.
     signals.post_save.connect(handlers.project_post_save,
                               sender=apps.get_model("projects", "Project"),
                               dispatch_uid='project_post_save')
 
     # Tags normalization before saving a project
-    signals.pre_save.connect(handlers.tags_normalization,
+    signals.pre_save.connect(tagging_handlers.tags_normalization,
                              sender=apps.get_model("projects", "Project"),
                              dispatch_uid="tags_normalization_projects")
-    signals.pre_save.connect(handlers.update_project_tags_when_create_or_edit_taggable_item,
-                             sender=apps.get_model("projects", "Project"),
-                             dispatch_uid="update_project_tags_when_create_or_edit_taggable_item_projects")
 
 
 def disconnect_projects_signals():
@@ -44,8 +42,6 @@ def disconnect_projects_signals():
                                  dispatch_uid='project_post_save')
     signals.pre_save.disconnect(sender=apps.get_model("projects", "Project"),
                                 dispatch_uid="tags_normalization_projects")
-    signals.pre_save.disconnect(sender=apps.get_model("projects", "Project"),
-                                dispatch_uid="update_project_tags_when_create_or_edit_taggable_item_projects")
 
 
 ## Memberships Signals
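Both sides of this pairing key the handlers by `dispatch_uid`, so a disconnect only detaches the handler registered under the same uid; dropping the `update_project_tags_when_create_or_edit_taggable_item` connect therefore also drops its disconnect. A minimal illustration of that pattern, with a made-up handler and uid that are not part of this changeset:

```python
# Minimal sketch of the connect/disconnect pairing used above.
from django.apps import apps
from django.db.models import signals


def log_project_save(sender, instance, **kwargs):
    """Hypothetical handler, used only to illustrate dispatch_uid pairing."""
    print("about to save project", instance.pk)


Project = apps.get_model("projects", "Project")
signals.pre_save.connect(log_project_save, sender=Project,
                         dispatch_uid="log_project_save_projects")
# Later: detaches exactly this handler, because the uid matches.
signals.pre_save.disconnect(sender=Project,
                            dispatch_uid="log_project_save_projects")
```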
@@ -34,6 +34,7 @@ from taiga.projects.history.mixins import HistoryResourceMixin
 
 from . import permissions
 from . import serializers
+from . import validators
 from . import models
 
 
@@ -42,6 +43,7 @@ class BaseAttachmentViewSet(HistoryResourceMixin, WatchedResourceMixin,
 
     model = models.Attachment
     serializer_class = serializers.AttachmentSerializer
+    validator_class = validators.AttachmentValidator
     filter_fields = ["project", "object_id"]
 
     content_type = None
@@ -63,6 +65,9 @@ class BaseAttachmentViewSet(HistoryResourceMixin, WatchedResourceMixin,
         obj.size = obj.attached_file.size
         obj.name = path.basename(obj.attached_file.name)
 
+        if obj.content_object is None:
+            raise exc.WrongArguments(_("The referenced object doesn't exist"))
+
         if obj.project_id != obj.content_object.project_id:
             raise exc.WrongArguments(_("Project ID doesn't match between the object and the project"))
 
@@ -72,12 +77,18 @@
         # NOTE: When an attachment is destroyed, the content_object changes
         #       after the deletion, not before.
         self.persist_history_snapshot(obj, delete=True)
-        super().pre_delete(obj)
+        super().post_delete(obj)
 
     def get_object_for_snapshot(self, obj):
         return obj.content_object
 
 
+class EpicAttachmentViewSet(BaseAttachmentViewSet):
+    permission_classes = (permissions.EpicAttachmentPermission,)
+    filter_backends = (filters.CanViewEpicAttachmentFilterBackend,)
+    content_type = "epics.epic"
+
+
 class UserStoryAttachmentViewSet(BaseAttachmentViewSet):
     permission_classes = (permissions.UserStoryAttachmentPermission,)
     filter_backends = (filters.CanViewUserStoryAttachmentFilterBackend,)
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+# Generated by Django 1.9.2 on 2016-06-17 12:33
+from __future__ import unicode_literals
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('attachments', '0005_attachment_sha1'),
+    ]
+
+    operations = [
+        migrations.AlterIndexTogether(
+            name='attachment',
+            index_together=set([('content_type', 'object_id')]),
+        ),
+    ]
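This migration mirrors the `index_together` added to the model's `Meta` in the next hunk: attachments are fetched through a generic relation, so the composite index lets that lookup be answered in one pass. The kind of query it serves, as a sketch with the model import path assumed:

```python
# The lookup the new ("content_type", "object_id") composite index is meant to
# serve; a sketch, with the model import path assumed.
from django.contrib.contenttypes.models import ContentType

from taiga.projects.attachments.models import Attachment


def attachments_for(obj):
    ct = ContentType.objects.get_for_model(obj)
    return Attachment.objects.filter(content_type=ct, object_id=obj.pk)
```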
@@ -70,6 +70,7 @@ class Attachment(models.Model):
         permissions = (
             ("view_attachment", "Can view attachment"),
         )
+        index_together = [("content_type", "object_id")]
 
     def __init__(self, *args, **kwargs):
         super(Attachment, self).__init__(*args, **kwargs)
@@ -28,6 +28,15 @@ class IsAttachmentOwnerPerm(PermissionComponent):
         return False
 
 
+class EpicAttachmentPermission(TaigaResourcePermission):
+    retrieve_perms = HasProjectPerm('view_epics') | IsAttachmentOwnerPerm()
+    create_perms = HasProjectPerm('modify_epic')
+    update_perms = HasProjectPerm('modify_epic') | IsAttachmentOwnerPerm()
+    partial_update_perms = HasProjectPerm('modify_epic') | IsAttachmentOwnerPerm()
+    destroy_perms = HasProjectPerm('modify_epic') | IsAttachmentOwnerPerm()
+    list_perms = AllowAny()
+
+
 class UserStoryAttachmentPermission(TaigaResourcePermission):
     retrieve_perms = HasProjectPerm('view_us') | IsAttachmentOwnerPerm()
     create_perms = HasProjectPerm('modify_us')
@@ -67,7 +76,9 @@ class WikiAttachmentPermission(TaigaResourcePermission):
 class RawAttachmentPerm(PermissionComponent):
     def check_permissions(self, request, view, obj=None):
         is_owner = IsAttachmentOwnerPerm().check_permissions(request, view, obj)
-        if obj.content_type.app_label == "userstories" and obj.content_type.model == "userstory":
+        if obj.content_type.app_label == "epics" and obj.content_type.model == "epic":
+            return EpicAttachmentPermission(request, view).check_permissions('retrieve', obj) or is_owner
+        elif obj.content_type.app_label == "userstories" and obj.content_type.model == "userstory":
             return UserStoryAttachmentPermission(request, view).check_permissions('retrieve', obj) or is_owner
         elif obj.content_type.app_label == "tasks" and obj.content_type.model == "task":
             return TaskAttachmentPermission(request, view).check_permissions('retrieve', obj) or is_owner
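`RawAttachmentPerm` grows by one `elif` per attachable type; the same dispatch can be written as a lookup keyed by `(app_label, model)`. This is only an illustration of the pattern, not code from this changeset, reusing the permission classes defined in this module:

```python
# Illustrative rewrite of the elif-chain as a table lookup; not part of this
# changeset. The permission classes are the ones defined in this module.
RAW_ATTACHMENT_PERMS = {
    ("epics", "epic"): EpicAttachmentPermission,
    ("userstories", "userstory"): UserStoryAttachmentPermission,
    ("tasks", "task"): TaskAttachmentPermission,
}


def raw_retrieve_allowed(request, view, obj):
    is_owner = IsAttachmentOwnerPerm().check_permissions(request, view, obj)
    perm_cls = RAW_ATTACHMENT_PERMS.get((obj.content_type.app_label, obj.content_type.model))
    if perm_cls is None:
        return is_owner
    return perm_cls(request, view).check_permissions('retrieve', obj) or is_owner
```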
Some files were not shown because too many files have changed in this diff.