@@ -109,7 +152,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
@@ -118,7 +161,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
@@ -128,7 +171,7 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
@@ -136,20 +179,22 @@ For self hosting environment setup, visit the [Self Hosting](https://docs.plane.
-## 📚Documentation
-
-For full documentation, visit [docs.plane.so](https://docs.plane.so/)
-
-To see how to Contribute, visit [here](https://github.com/makeplane/plane/blob/master/CONTRIBUTING.md).
+## ⛓️ Security
-## ❤️ Community
+If you believe you have found a security vulnerability in Plane, we encourage you to disclose it responsibly rather than opening a public issue. We will investigate all legitimate reports.
-The Plane community can be found on GitHub Discussions, where you can ask questions, voice ideas, and share your projects.
+Email squawk@plane.so to disclose any security vulnerabilities.
-To chat with other community members you can join the [Plane Discord](https://discord.com/invite/A92xrEGCge).
+## ❤️ Contribute
-Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) applies to all Plane community channels.
+There are many ways to contribute to Plane, including:
+- Submitting [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) and [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+) for various components.
+- Reviewing [the documentation](https://docs.plane.so/) and submitting [pull requests](https://github.com/makeplane/plane), from fixing typos to adding new features.
+- Speaking or writing about Plane or any of its ecosystem integrations and [letting us know](https://discord.com/invite/A92xrEGCge)!
+- Upvoting [popular feature requests](https://github.com/makeplane/plane/issues) to show your support.
-## ⛓️ Security
+### We couldn't have done this without you.
-If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email engineering@plane.so to disclose any security vulnerabilities.
+
+
+
\ No newline at end of file
diff --git a/apiserver/package.json b/apiserver/package.json
index fb4f8441d43..060944406ca 100644
--- a/apiserver/package.json
+++ b/apiserver/package.json
@@ -1,4 +1,4 @@
{
"name": "plane-api",
- "version": "0.15.1"
+ "version": "0.16.0"
}
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index 6f66c373ec6..84931f46be9 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -45,7 +45,10 @@ def get_queryset(self):
return (
Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("project")
.select_related("workspace")
.select_related("owned_by")
@@ -390,7 +393,10 @@ def get_queryset(self):
)
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.filter(cycle_id=self.kwargs.get("cycle_id"))
.select_related("project")
.select_related("workspace")
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index a759b15f6e0..bf3313779c2 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -352,7 +352,10 @@ def get_queryset(self):
return (
Label.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("project")
.select_related("workspace")
.select_related("parent")
@@ -481,7 +484,10 @@ def get_queryset(self):
IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
@@ -607,11 +613,11 @@ def get_queryset(self):
)
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .select_related("project")
- .select_related("workspace")
- .select_related("issue")
- .select_related("actor")
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .select_related("workspace", "project", "issue", "actor")
.annotate(
is_member=Exists(
ProjectMember.objects.filter(
@@ -647,6 +653,33 @@ def get(self, request, slug, project_id, issue_id, pk=None):
)
def post(self, request, slug, project_id, issue_id):
+
+        # Validation check if the issue comment already exists
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and IssueComment.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ issue_comment = IssueComment.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "Issue Comment with the same external id and external source already exists",
+ "id": str(issue_comment.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+
serializer = IssueCommentSerializer(data=request.data)
if serializer.is_valid():
serializer.save(
@@ -680,6 +713,29 @@ def patch(self, request, slug, project_id, issue_id, pk):
IssueCommentSerializer(issue_comment).data,
cls=DjangoJSONEncoder,
)
+
+        # Validation check if the issue comment already exists
+ if (
+ request.data.get("external_id")
+ and (issue_comment.external_id != str(request.data.get("external_id")))
+ and IssueComment.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get(
+ "external_source", issue_comment.external_source
+ ),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Issue Comment with the same external id and external source already exists",
+ "id": str(issue_comment.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+
serializer = IssueCommentSerializer(
issue_comment, data=request.data, partial=True
)
@@ -734,6 +790,7 @@ def get(self, request, slug, project_id, issue_id, pk=None):
.filter(
~Q(field__in=["comment", "vote", "reaction", "draft"]),
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
)
.select_related("actor", "workspace", "issue", "project")
).order_by(request.GET.get("order_by", "created_at"))
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index d509a53c79d..2e5bb85e2b7 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -273,7 +273,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(module_id=self.kwargs.get("module_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("project")
.select_related("workspace")
.select_related("module")
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index 0a262a071d4..ec10f9babe1 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -1,7 +1,5 @@
-# Python imports
-from itertools import groupby
-
# Django imports
+from django.db import IntegrityError
from django.db.models import Q
# Third party imports
@@ -26,7 +24,10 @@ def get_queryset(self):
return (
State.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.filter(~Q(name="Triage"))
.select_related("project")
.select_related("workspace")
@@ -34,37 +35,51 @@ def get_queryset(self):
)
def post(self, request, slug, project_id):
- serializer = StateSerializer(
- data=request.data, context={"project_id": project_id}
- )
- if serializer.is_valid():
- if (
- request.data.get("external_id")
- and request.data.get("external_source")
- and State.objects.filter(
- project_id=project_id,
- workspace__slug=slug,
- external_source=request.data.get("external_source"),
- external_id=request.data.get("external_id"),
- ).exists()
- ):
- state = State.objects.filter(
- workspace__slug=slug,
- project_id=project_id,
- external_id=request.data.get("external_id"),
- external_source=request.data.get("external_source"),
- ).first()
- return Response(
- {
- "error": "State with the same external id and external source already exists",
- "id": str(state.id),
- },
- status=status.HTTP_409_CONFLICT,
- )
+ try:
+ serializer = StateSerializer(
+ data=request.data, context={"project_id": project_id}
+ )
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and State.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ state = State.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "State with the same external id and external source already exists",
+ "id": str(state.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
- serializer.save(project_id=project_id)
- return Response(serializer.data, status=status.HTTP_200_OK)
- return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ serializer.save(project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except IntegrityError as e:
+ state = State.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ name=request.data.get("name"),
+ ).first()
+ return Response(
+ {
+ "error": "State with the same name already exists in the project",
+ "id": str(state.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
def get(self, request, slug, project_id, state_id=None):
if state_id:
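
Alongside the explicit duplicate check, the rewritten post handler above maps a database-level IntegrityError (the unique state name per project) onto the same 409 shape. The skeleton of that fallback, as a sketch; `save_or_conflict` and `find_existing` are placeholder names of ours for the inline logic shown in the hunk:

from django.db import IntegrityError
from rest_framework import status
from rest_framework.response import Response


def save_or_conflict(serializer, find_existing, conflict_message):
    # Try to persist; on a unique-constraint violation answer 409 with the
    # id of the record that already occupies the slot.
    try:
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)
    except IntegrityError:
        duplicate = find_existing()
        return Response(
            {"error": conflict_message, "id": str(duplicate.id)},
            status=status.HTTP_409_CONFLICT,
        )
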
diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py
index 28e88106031..9bdd4baaf9d 100644
--- a/apiserver/plane/app/serializers/__init__.py
+++ b/apiserver/plane/app/serializers/__init__.py
@@ -69,9 +69,13 @@
RelatedIssueSerializer,
IssuePublicSerializer,
IssueDetailSerializer,
+ IssueReactionLiteSerializer,
+ IssueAttachmentLiteSerializer,
+ IssueLinkLiteSerializer,
)
from .module import (
+ ModuleDetailSerializer,
ModuleWriteSerializer,
ModuleSerializer,
ModuleIssueSerializer,
diff --git a/apiserver/plane/app/serializers/base.py b/apiserver/plane/app/serializers/base.py
index 446fdb6d537..6693ba931ce 100644
--- a/apiserver/plane/app/serializers/base.py
+++ b/apiserver/plane/app/serializers/base.py
@@ -58,9 +58,12 @@ def _filter_fields(self, fields):
IssueSerializer,
LabelSerializer,
CycleIssueSerializer,
- IssueFlatSerializer,
+ IssueLiteSerializer,
IssueRelationSerializer,
- InboxIssueLiteSerializer
+ InboxIssueLiteSerializer,
+ IssueReactionLiteSerializer,
+ IssueAttachmentLiteSerializer,
+ IssueLinkLiteSerializer,
)
# Expansion mapper
@@ -79,12 +82,34 @@ def _filter_fields(self, fields):
"assignees": UserLiteSerializer,
"labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer,
- "parent": IssueSerializer,
+ "parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
- "issue_inbox" : InboxIssueLiteSerializer,
+ "issue_inbox": InboxIssueLiteSerializer,
+ "issue_reactions": IssueReactionLiteSerializer,
+ "issue_attachment": IssueAttachmentLiteSerializer,
+ "issue_link": IssueLinkLiteSerializer,
+ "sub_issues": IssueLiteSerializer,
}
-
- self.fields[field] = expansion[field](many=True if field in ["members", "assignees", "labels", "issue_cycle", "issue_relation", "issue_inbox"] else False)
+
+ self.fields[field] = expansion[field](
+ many=(
+ True
+ if field
+ in [
+ "members",
+ "assignees",
+ "labels",
+ "issue_cycle",
+ "issue_relation",
+ "issue_inbox",
+ "issue_reactions",
+ "issue_attachment",
+ "issue_link",
+ "sub_issues",
+ ]
+ else False
+ )
+ )
return self.fields
@@ -105,7 +130,11 @@ def to_representation(self, instance):
LabelSerializer,
CycleIssueSerializer,
IssueRelationSerializer,
- InboxIssueLiteSerializer
+ InboxIssueLiteSerializer,
+ IssueLiteSerializer,
+ IssueReactionLiteSerializer,
+ IssueAttachmentLiteSerializer,
+ IssueLinkLiteSerializer,
)
# Expansion mapper
@@ -124,9 +153,13 @@ def to_representation(self, instance):
"assignees": UserLiteSerializer,
"labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer,
- "parent": IssueSerializer,
+ "parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
- "issue_inbox" : InboxIssueLiteSerializer,
+ "issue_inbox": InboxIssueLiteSerializer,
+ "issue_reactions": IssueReactionLiteSerializer,
+ "issue_attachment": IssueAttachmentLiteSerializer,
+ "issue_link": IssueLinkLiteSerializer,
+ "sub_issues": IssueLiteSerializer,
}
# Check if field in expansion then expand the field
if expand in expansion:
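
For reference, the expansion mapping above picks a lite serializer per field name and passes many=True only for list-valued relations. A stand-alone toy of just that lookup-and-instantiate step; every name below is invented for illustration:

class FakeLiteSerializer:
    # Stand-in for a DRF serializer; only records the `many` flag.
    def __init__(self, many=False):
        self.many = many


EXPANSION = {"labels": FakeLiteSerializer, "parent": FakeLiteSerializer}
LIST_VALUED = {"labels"}  # relations that expand to a list of objects


def expand_field(name):
    serializer_cls = EXPANSION[name]
    return serializer_cls(many=name in LIST_VALUED)


assert expand_field("labels").many is True
assert expand_field("parent").many is False
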
diff --git a/apiserver/plane/app/serializers/cycle.py b/apiserver/plane/app/serializers/cycle.py
index 77c3f16cc75..a273b349c3d 100644
--- a/apiserver/plane/app/serializers/cycle.py
+++ b/apiserver/plane/app/serializers/cycle.py
@@ -3,10 +3,7 @@
# Module imports
from .base import BaseSerializer
-from .user import UserLiteSerializer
from .issue import IssueStateSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
from plane.db.models import (
Cycle,
CycleIssue,
@@ -14,7 +11,6 @@
CycleUserProperties,
)
-
class CycleWriteSerializer(BaseSerializer):
def validate(self, data):
if (
@@ -30,65 +26,57 @@ def validate(self, data):
class Meta:
model = Cycle
fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "owned_by",
+ ]
class CycleSerializer(BaseSerializer):
+ # favorite
is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
+ # state group wise distribution
cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
- assignees = serializers.SerializerMethodField(read_only=True)
- total_estimates = serializers.IntegerField(read_only=True)
- completed_estimates = serializers.IntegerField(read_only=True)
- started_estimates = serializers.IntegerField(read_only=True)
- workspace_detail = WorkspaceLiteSerializer(
- read_only=True, source="workspace"
- )
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- status = serializers.CharField(read_only=True)
-
- def validate(self, data):
- if (
- data.get("start_date", None) is not None
- and data.get("end_date", None) is not None
- and data.get("start_date", None) > data.get("end_date", None)
- ):
- raise serializers.ValidationError(
- "Start date cannot exceed end date"
- )
- return data
-
- def get_assignees(self, obj):
- members = [
- {
- "avatar": assignee.avatar,
- "display_name": assignee.display_name,
- "id": assignee.id,
- }
- for issue_cycle in obj.issue_cycle.prefetch_related(
- "issue__assignees"
- ).all()
- for assignee in issue_cycle.issue.assignees.all()
- ]
- # Use a set comprehension to return only the unique objects
- unique_objects = {frozenset(item.items()) for item in members}
- # Convert the set back to a list of dictionaries
- unique_list = [dict(item) for item in unique_objects]
+ # active | draft | upcoming | completed
+ status = serializers.CharField(read_only=True)
- return unique_list
class Meta:
model = Cycle
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "owned_by",
+ fields = [
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "status",
]
+ read_only_fields = fields
class CycleIssueSerializer(BaseSerializer):
diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py
index 90069bd41bd..411c5b73f88 100644
--- a/apiserver/plane/app/serializers/issue.py
+++ b/apiserver/plane/app/serializers/issue.py
@@ -444,6 +444,22 @@ def create(self, validated_data):
return IssueLink.objects.create(**validated_data)
+class IssueLinkLiteSerializer(BaseSerializer):
+
+ class Meta:
+ model = IssueLink
+ fields = [
+ "id",
+ "issue_id",
+ "title",
+ "url",
+ "metadata",
+ "created_by_id",
+ "created_at",
+ ]
+ read_only_fields = fields
+
+
class IssueAttachmentSerializer(BaseSerializer):
class Meta:
model = IssueAttachment
@@ -459,6 +475,21 @@ class Meta:
]
+class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
+
+ class Meta:
+ model = IssueAttachment
+ fields = [
+ "id",
+ "asset",
+ "attributes",
+ "issue_id",
+ "updated_at",
+ "updated_by_id",
+ ]
+ read_only_fields = fields
+
+
class IssueReactionSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
@@ -473,6 +504,18 @@ class Meta:
]
+class IssueReactionLiteSerializer(DynamicBaseSerializer):
+
+ class Meta:
+ model = IssueReaction
+ fields = [
+ "id",
+ "actor_id",
+ "issue_id",
+ "reaction",
+ ]
+
+
class CommentReactionSerializer(BaseSerializer):
class Meta:
model = CommentReaction
@@ -503,9 +546,7 @@ class IssueCommentSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
)
- comment_reactions = CommentReactionSerializer(
- read_only=True, many=True
- )
+ comment_reactions = CommentReactionSerializer(read_only=True, many=True)
is_member = serializers.BooleanField(read_only=True)
class Meta:
@@ -558,18 +599,17 @@ class Meta:
class IssueSerializer(DynamicBaseSerializer):
# ids
- project_id = serializers.PrimaryKeyRelatedField(read_only=True)
- state_id = serializers.PrimaryKeyRelatedField(read_only=True)
- parent_id = serializers.PrimaryKeyRelatedField(read_only=True)
cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
- module_ids = serializers.SerializerMethodField()
+ module_ids = serializers.ListField(
+ child=serializers.UUIDField(), required=False,
+ )
# Many to many
- label_ids = serializers.PrimaryKeyRelatedField(
- read_only=True, many=True, source="labels"
+ label_ids = serializers.ListField(
+ child=serializers.UUIDField(), required=False,
)
- assignee_ids = serializers.PrimaryKeyRelatedField(
- read_only=True, many=True, source="assignees"
+ assignee_ids = serializers.ListField(
+ child=serializers.UUIDField(), required=False,
)
# Count items
@@ -577,9 +617,6 @@ class IssueSerializer(DynamicBaseSerializer):
attachment_count = serializers.IntegerField(read_only=True)
link_count = serializers.IntegerField(read_only=True)
- # is_subscribed
- is_subscribed = serializers.BooleanField(read_only=True)
-
class Meta:
model = Issue
fields = [
@@ -606,57 +643,45 @@ class Meta:
"updated_by",
"attachment_count",
"link_count",
- "is_subscribed",
"is_draft",
"archived_at",
]
read_only_fields = fields
- def get_module_ids(self, obj):
- # Access the prefetched modules and extract module IDs
- return [module for module in obj.issue_module.values_list("module_id", flat=True)]
-
class IssueDetailSerializer(IssueSerializer):
- description_html = serializers.CharField()
+ description_html = serializers.CharField()
+ is_subscribed = serializers.BooleanField(read_only=True)
class Meta(IssueSerializer.Meta):
- fields = IssueSerializer.Meta.fields + ['description_html']
+ fields = IssueSerializer.Meta.fields + [
+ "description_html",
+ "is_subscribed",
+ ]
class IssueLiteSerializer(DynamicBaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(
- read_only=True, source="workspace"
- )
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- state_detail = StateLiteSerializer(read_only=True, source="state")
- label_details = LabelLiteSerializer(
- read_only=True, source="labels", many=True
- )
- assignee_details = UserLiteSerializer(
- read_only=True, source="assignees", many=True
- )
- sub_issues_count = serializers.IntegerField(read_only=True)
- cycle_id = serializers.UUIDField(read_only=True)
- module_id = serializers.UUIDField(read_only=True)
- attachment_count = serializers.IntegerField(read_only=True)
- link_count = serializers.IntegerField(read_only=True)
- issue_reactions = IssueReactionSerializer(read_only=True, many=True)
class Meta:
model = Issue
- fields = "__all__"
- read_only_fields = [
- "start_date",
- "target_date",
- "completed_at",
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
+ fields = [
+ "id",
+ "sequence_id",
+ "project_id",
]
+ read_only_fields = fields
+
+
+class IssueDetailSerializer(IssueSerializer):
+ description_html = serializers.CharField()
+ is_subscribed = serializers.BooleanField()
+
+ class Meta(IssueSerializer.Meta):
+ fields = IssueSerializer.Meta.fields + [
+ "description_html",
+ "is_subscribed",
+ ]
+ read_only_fields = fields
class IssuePublicSerializer(BaseSerializer):
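
The reworked IssueSerializer above exposes relations as flat id arrays (label_ids, assignee_ids, module_ids) rather than nested detail serializers. The field declaration in isolation, on a throwaway serializer defined only for this sketch and assuming the project's DRF setup:

from rest_framework import serializers


class IdArrayExample(serializers.Serializer):
    # Same shape as label_ids / assignee_ids / module_ids above: a flat,
    # optional list of UUIDs, cheap to produce from an ArrayAgg annotation.
    label_ids = serializers.ListField(
        child=serializers.UUIDField(), required=False
    )


# Usage (sketch): IdArrayExample(data={"label_ids": [...]}).is_valid() checks
# that every entry is a UUID and leaves the field absent when it is omitted.
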
diff --git a/apiserver/plane/app/serializers/module.py b/apiserver/plane/app/serializers/module.py
index e9419567182..4aabfc50efd 100644
--- a/apiserver/plane/app/serializers/module.py
+++ b/apiserver/plane/app/serializers/module.py
@@ -5,7 +5,6 @@
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer
from .project import ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
@@ -19,17 +18,18 @@
class ModuleWriteSerializer(BaseSerializer):
- members = serializers.ListField(
+ lead_id = serializers.PrimaryKeyRelatedField(
+ source="lead",
+ queryset=User.objects.all(),
+ required=False,
+ allow_null=True,
+ )
+ member_ids = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(
- source="workspace", read_only=True
- )
-
class Meta:
model = Module
fields = "__all__"
@@ -44,7 +44,9 @@ class Meta:
def to_representation(self, instance):
data = super().to_representation(instance)
- data["members"] = [str(member.id) for member in instance.members.all()]
+ data["member_ids"] = [
+ str(member.id) for member in instance.members.all()
+ ]
return data
def validate(self, data):
@@ -59,12 +61,10 @@ def validate(self, data):
return data
def create(self, validated_data):
- members = validated_data.pop("members", None)
-
+ members = validated_data.pop("member_ids", None)
project = self.context["project"]
module = Module.objects.create(**validated_data, project=project)
-
if members is not None:
ModuleMember.objects.bulk_create(
[
@@ -85,7 +85,7 @@ def create(self, validated_data):
return module
def update(self, instance, validated_data):
- members = validated_data.pop("members", None)
+ members = validated_data.pop("member_ids", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()
@@ -142,7 +142,6 @@ class Meta:
class ModuleLinkSerializer(BaseSerializer):
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
class Meta:
model = ModuleLink
@@ -170,12 +169,9 @@ def create(self, validated_data):
class ModuleSerializer(DynamicBaseSerializer):
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- lead_detail = UserLiteSerializer(read_only=True, source="lead")
- members_detail = UserLiteSerializer(
- read_only=True, many=True, source="members"
+ member_ids = serializers.ListField(
+ child=serializers.UUIDField(), required=False, allow_null=True
)
- link_module = ModuleLinkSerializer(read_only=True, many=True)
is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
cancelled_issues = serializers.IntegerField(read_only=True)
@@ -186,15 +182,46 @@ class ModuleSerializer(DynamicBaseSerializer):
class Meta:
model = Module
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
+ fields = [
+ # Required fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # Model fields
+ "name",
+ "description",
+ "description_text",
+ "description_html",
+ "start_date",
+ "target_date",
+ "status",
+ "lead_id",
+ "member_ids",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ # computed fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
"created_at",
"updated_at",
]
+ read_only_fields = fields
+
+
+
+class ModuleDetailSerializer(ModuleSerializer):
+
+ link_module = ModuleLinkSerializer(read_only=True, many=True)
+
+ class Meta(ModuleSerializer.Meta):
+        fields = ModuleSerializer.Meta.fields + ["link_module"]
class ModuleFavoriteSerializer(BaseSerializer):
diff --git a/apiserver/plane/app/urls/issue.py b/apiserver/plane/app/urls/issue.py
index 234c2824dd7..4ee70450b37 100644
--- a/apiserver/plane/app/urls/issue.py
+++ b/apiserver/plane/app/urls/issue.py
@@ -2,6 +2,7 @@
from plane.app.views import (
+ IssueListEndpoint,
IssueViewSet,
LabelViewSet,
BulkCreateIssueLabelsEndpoint,
@@ -25,6 +26,11 @@
urlpatterns = [
+ path(
+ "workspaces//projects//issues/list/",
+ IssueListEndpoint.as_view(),
+ name="project-issue",
+ ),
path(
"workspaces//projects//issues/",
IssueViewSet.as_view(
@@ -84,11 +90,13 @@
BulkImportIssuesEndpoint.as_view(),
name="project-issues-bulk",
),
+ # deprecated endpoint TODO: remove once confirmed
path(
"workspaces//my-issues/",
UserWorkSpaceIssues.as_view(),
name="workspace-issues",
),
+ ##
path(
"workspaces//projects//issues//sub-issues/",
SubIssuesEndpoint.as_view(),
@@ -251,23 +259,15 @@
name="project-issue-archive",
),
path(
- "workspaces//projects//archived-issues//",
+ "workspaces//projects//issues//archive/",
IssueArchiveViewSet.as_view(
{
"get": "retrieve",
- "delete": "destroy",
- }
- ),
- name="project-issue-archive",
- ),
- path(
- "workspaces//projects//unarchive//",
- IssueArchiveViewSet.as_view(
- {
- "post": "unarchive",
+ "post": "archive",
+ "delete": "unarchive",
}
),
- name="project-issue-archive",
+ name="project-issue-archive-unarchive",
),
## End Issue Archives
## Issue Relation
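
The archive URLs above collapse two routes into a single path whose HTTP verbs map straight onto viewset actions (GET retrieve, POST archive, DELETE unarchive). A self-contained toy showing that mapping idiom; the demo viewset and route below are ours, not part of the codebase:

from django.urls import path
from rest_framework import viewsets
from rest_framework.response import Response


class ArchiveDemoViewSet(viewsets.ViewSet):
    # Toy viewset: one URL fans out to three actions, the same way the new
    # .../issues/<uuid:pk>/archive/ route does above.
    def retrieve(self, request, pk=None):
        return Response({"id": pk, "archived": False})

    def archive(self, request, pk=None):
        return Response({"id": pk, "archived": True})

    def unarchive(self, request, pk=None):
        return Response({"id": pk, "archived": False})


urlpatterns = [
    path(
        "demo/<int:pk>/archive/",
        ArchiveDemoViewSet.as_view(
            {"get": "retrieve", "post": "archive", "delete": "unarchive"}
        ),
        name="demo-archive-unarchive",
    ),
]
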
diff --git a/apiserver/plane/app/urls/workspace.py b/apiserver/plane/app/urls/workspace.py
index 7e64e586aaf..a70ff18e535 100644
--- a/apiserver/plane/app/urls/workspace.py
+++ b/apiserver/plane/app/urls/workspace.py
@@ -22,6 +22,8 @@
WorkspaceUserPropertiesEndpoint,
WorkspaceStatesEndpoint,
WorkspaceEstimatesEndpoint,
+ WorkspaceModulesEndpoint,
+ WorkspaceCyclesEndpoint,
)
@@ -219,4 +221,14 @@
WorkspaceEstimatesEndpoint.as_view(),
name="workspace-estimate",
),
+ path(
+ "workspaces//modules/",
+ WorkspaceModulesEndpoint.as_view(),
+ name="workspace-modules",
+ ),
+ path(
+ "workspaces//cycles/",
+ WorkspaceCyclesEndpoint.as_view(),
+ name="workspace-cycles",
+ ),
]
diff --git a/apiserver/plane/app/views/__init__.py b/apiserver/plane/app/views/__init__.py
index 0a959a667b7..d4a13e49749 100644
--- a/apiserver/plane/app/views/__init__.py
+++ b/apiserver/plane/app/views/__init__.py
@@ -49,6 +49,8 @@
WorkspaceUserPropertiesEndpoint,
WorkspaceStatesEndpoint,
WorkspaceEstimatesEndpoint,
+ WorkspaceModulesEndpoint,
+ WorkspaceCyclesEndpoint,
)
from .state import StateViewSet
from .view import (
@@ -67,6 +69,7 @@
)
from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
from .issue import (
+ IssueListEndpoint,
IssueViewSet,
WorkSpaceIssuesEndpoint,
IssueActivityEndpoint,
diff --git a/apiserver/plane/app/views/analytic.py b/apiserver/plane/app/views/analytic.py
index 04a77f789e3..6eb914b236e 100644
--- a/apiserver/plane/app/views/analytic.py
+++ b/apiserver/plane/app/views/analytic.py
@@ -1,6 +1,7 @@
# Django imports
from django.db.models import Count, Sum, F, Q
from django.db.models.functions import ExtractMonth
+from django.utils import timezone
# Third party imports
from rest_framework import status
@@ -331,8 +332,9 @@ def get(self, request, slug):
.order_by("state_group")
)
+ current_year = timezone.now().year
issue_completed_month_wise = (
- base_issues.filter(completed_at__isnull=False)
+ base_issues.filter(completed_at__year=current_year)
.annotate(month=ExtractMonth("completed_at"))
.values("month")
.annotate(count=Count("*"))
diff --git a/apiserver/plane/app/views/config.py b/apiserver/plane/app/views/config.py
index 29b4bbf8bb5..b2a27252cba 100644
--- a/apiserver/plane/app/views/config.py
+++ b/apiserver/plane/app/views/config.py
@@ -66,15 +66,15 @@ def get(self, request):
},
{
"key": "SLACK_CLIENT_ID",
- "default": os.environ.get("SLACK_CLIENT_ID", "1"),
+ "default": os.environ.get("SLACK_CLIENT_ID", None),
},
{
"key": "POSTHOG_API_KEY",
- "default": os.environ.get("POSTHOG_API_KEY", "1"),
+ "default": os.environ.get("POSTHOG_API_KEY", None),
},
{
"key": "POSTHOG_HOST",
- "default": os.environ.get("POSTHOG_HOST", "1"),
+ "default": os.environ.get("POSTHOG_HOST", None),
},
{
"key": "UNSPLASH_ACCESS_KEY",
@@ -181,11 +181,11 @@ def get(self, request):
},
{
"key": "POSTHOG_API_KEY",
- "default": os.environ.get("POSTHOG_API_KEY", "1"),
+ "default": os.environ.get("POSTHOG_API_KEY", None),
},
{
"key": "POSTHOG_HOST",
- "default": os.environ.get("POSTHOG_HOST", "1"),
+ "default": os.environ.get("POSTHOG_HOST", None),
},
{
"key": "UNSPLASH_ACCESS_KEY",
diff --git a/apiserver/plane/app/views/cycle.py b/apiserver/plane/app/views/cycle.py
index 63d8d28aea6..85e1e9f2e84 100644
--- a/apiserver/plane/app/views/cycle.py
+++ b/apiserver/plane/app/views/cycle.py
@@ -20,7 +20,10 @@
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
-from django.core.serializers.json import DjangoJSONEncoder
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce
# Third party imports
from rest_framework.response import Response
@@ -33,7 +36,6 @@
CycleIssueSerializer,
CycleFavoriteSerializer,
IssueSerializer,
- IssueStateSerializer,
CycleWriteSerializer,
CycleUserPropertiesSerializer,
)
@@ -51,7 +53,6 @@
IssueAttachment,
Label,
CycleUserProperties,
- IssueSubscriber,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.issue_filters import issue_filters
@@ -73,7 +74,7 @@ def perform_create(self, serializer):
)
def get_queryset(self):
- subquery = CycleFavorite.objects.filter(
+ favorite_subquery = CycleFavorite.objects.filter(
user=self.request.user,
cycle_id=OuterRef("pk"),
project_id=self.kwargs.get("project_id"),
@@ -84,11 +85,28 @@ def get_queryset(self):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
- .select_related("project")
- .select_related("workspace")
- .select_related("owned_by")
- .annotate(is_favorite=Exists(subquery))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .select_related("project", "workspace", "owned_by")
+ .prefetch_related(
+ Prefetch(
+ "issue_cycle__issue__assignees",
+ queryset=User.objects.only(
+ "avatar", "first_name", "id"
+ ).distinct(),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_cycle__issue__labels",
+ queryset=Label.objects.only(
+ "name", "color", "id"
+ ).distinct(),
+ )
+ )
+ .annotate(is_favorite=Exists(favorite_subquery))
.annotate(
total_issues=Count(
"issue_cycle",
@@ -148,29 +166,6 @@ def get_queryset(self):
),
)
)
- .annotate(
- total_estimates=Sum("issue_cycle__issue__estimate_point")
- )
- .annotate(
- completed_estimates=Sum(
- "issue_cycle__issue__estimate_point",
- filter=Q(
- issue_cycle__issue__state__group="completed",
- issue_cycle__issue__archived_at__isnull=True,
- issue_cycle__issue__is_draft=False,
- ),
- )
- )
- .annotate(
- started_estimates=Sum(
- "issue_cycle__issue__estimate_point",
- filter=Q(
- issue_cycle__issue__state__group="started",
- issue_cycle__issue__archived_at__isnull=True,
- issue_cycle__issue__is_draft=False,
- ),
- )
- )
.annotate(
status=Case(
When(
@@ -190,20 +185,16 @@ def get_queryset(self):
output_field=CharField(),
)
)
- .prefetch_related(
- Prefetch(
- "issue_cycle__issue__assignees",
- queryset=User.objects.only(
- "avatar", "first_name", "id"
- ).distinct(),
- )
- )
- .prefetch_related(
- Prefetch(
- "issue_cycle__issue__labels",
- queryset=Label.objects.only(
- "name", "color", "id"
- ).distinct(),
+ .annotate(
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "issue_cycle__issue__assignees__id",
+ distinct=True,
+ filter=~Q(
+ issue_cycle__issue__assignees__id__isnull=True
+ ),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
)
)
.order_by("-is_favorite", "name")
@@ -213,12 +204,8 @@ def get_queryset(self):
def list(self, request, slug, project_id):
queryset = self.get_queryset()
cycle_view = request.GET.get("cycle_view", "all")
- fields = [
- field
- for field in request.GET.get("fields", "").split(",")
- if field
- ]
+ # Update the order by
queryset = queryset.order_by("-is_favorite", "-created_at")
# Current Cycle
@@ -228,9 +215,35 @@ def list(self, request, slug, project_id):
end_date__gte=timezone.now(),
)
- data = CycleSerializer(queryset, many=True).data
+ data = queryset.values(
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "assignee_ids",
+ "status",
+ )
- if len(data):
+ if data:
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=data[0]["id"],
@@ -315,19 +328,45 @@ def list(self, request, slug, project_id):
}
if data[0]["start_date"] and data[0]["end_date"]:
- data[0]["distribution"][
- "completion_chart"
- ] = burndown_plot(
- queryset=queryset.first(),
- slug=slug,
- project_id=project_id,
- cycle_id=data[0]["id"],
+ data[0]["distribution"]["completion_chart"] = (
+ burndown_plot(
+ queryset=queryset.first(),
+ slug=slug,
+ project_id=project_id,
+ cycle_id=data[0]["id"],
+ )
)
return Response(data, status=status.HTTP_200_OK)
- cycles = CycleSerializer(queryset, many=True).data
- return Response(cycles, status=status.HTTP_200_OK)
+ data = queryset.values(
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "assignee_ids",
+ "status",
+ )
+ return Response(data, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
if (
@@ -337,7 +376,7 @@ def create(self, request, slug, project_id):
request.data.get("start_date", None) is not None
and request.data.get("end_date", None) is not None
):
- serializer = CycleSerializer(data=request.data)
+ serializer = CycleWriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(
project_id=project_id,
@@ -346,12 +385,36 @@ def create(self, request, slug, project_id):
cycle = (
self.get_queryset()
.filter(pk=serializer.data["id"])
+ .values(
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "assignee_ids",
+ "status",
+ )
.first()
)
- serializer = CycleSerializer(cycle)
- return Response(
- serializer.data, status=status.HTTP_201_CREATED
- )
+ return Response(cycle, status=status.HTTP_201_CREATED)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
@@ -364,10 +427,11 @@ def create(self, request, slug, project_id):
)
def partial_update(self, request, slug, project_id, pk):
- cycle = Cycle.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
+ queryset = (
+ self.get_queryset()
+ .filter(workspace__slug=slug, project_id=project_id, pk=pk)
)
-
+ cycle = queryset.first()
request_data = request.data
if (
@@ -375,7 +439,7 @@ def partial_update(self, request, slug, project_id, pk):
and cycle.end_date < timezone.now().date()
):
if "sort_order" in request_data:
- # Can only change sort order
+            # Can only change sort order for a completed cycle
request_data = {
"sort_order": request_data.get(
"sort_order", cycle.sort_order
@@ -394,12 +458,71 @@ def partial_update(self, request, slug, project_id, pk):
)
if serializer.is_valid():
serializer.save()
- return Response(serializer.data, status=status.HTTP_200_OK)
+ cycle = queryset.values(
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "assignee_ids",
+ "status",
+ ).first()
+ return Response(cycle, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk):
- queryset = self.get_queryset().get(pk=pk)
-
+ queryset = self.get_queryset().filter(pk=pk)
+ data = (
+ self.get_queryset()
+ .filter(pk=pk)
+ .values(
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "assignee_ids",
+ "status",
+ )
+ .first()
+ )
+ queryset = queryset.first()
# Assignee Distribution
assignee_distribution = (
Issue.objects.filter(
@@ -488,7 +611,6 @@ def retrieve(self, request, slug, project_id, pk):
.order_by("label_name")
)
- data = CycleSerializer(queryset).data
data["distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
@@ -570,7 +692,10 @@ def get_queryset(self):
)
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.filter(cycle_id=self.kwargs.get("cycle_id"))
.select_related("project")
.select_related("workspace")
@@ -589,20 +714,18 @@ def list(self, request, slug, project_id, cycle_id):
]
order_by = request.GET.get("order_by", "created_at")
filters = issue_filters(request.query_params, "GET")
- issues = (
+ queryset = (
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(
- parent=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
.filter(project_id=project_id)
.filter(workspace__slug=slug)
+ .filter(**filters)
.select_related("workspace", "project", "state", "parent")
- .prefetch_related("assignees", "labels", "issue_module__module")
+ .prefetch_related(
+ "assignees",
+ "labels",
+ "issue_module__module",
+ "issue_cycle__cycle",
+ )
.order_by(order_by)
.filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
@@ -621,22 +744,79 @@ def list(self, request, slug, project_id, cycle_id):
.values("count")
)
.annotate(
- is_subscribed=Exists(
- IssueSubscriber.objects.filter(
- subscriber=self.request.user, issue_id=OuterRef("id")
- )
+ sub_issues_count=Issue.issue_objects.filter(
+ parent=OuterRef("id")
)
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
+ .order_by(order_by)
)
- serializer = IssueSerializer(
- issues, many=True, fields=fields if fields else None
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
+ if self.fields:
+ issues = IssueSerializer(
+ queryset, many=True, fields=fields if fields else None
+ ).data
+ else:
+ issues = queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
+ return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id, cycle_id):
issues = request.data.get("issues", [])
- if not len(issues):
+ if not issues:
return Response(
{"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
@@ -658,52 +838,52 @@ def create(self, request, slug, project_id, cycle_id):
)
# Get all CycleIssues already created
- cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
- update_cycle_issue_activity = []
- record_to_create = []
- records_to_update = []
-
- for issue in issues:
- cycle_issue = [
- cycle_issue
- for cycle_issue in cycle_issues
- if str(cycle_issue.issue_id) in issues
- ]
- # Update only when cycle changes
- if len(cycle_issue):
- if cycle_issue[0].cycle_id != cycle_id:
- update_cycle_issue_activity.append(
- {
- "old_cycle_id": str(cycle_issue[0].cycle_id),
- "new_cycle_id": str(cycle_id),
- "issue_id": str(cycle_issue[0].issue_id),
- }
- )
- cycle_issue[0].cycle_id = cycle_id
- records_to_update.append(cycle_issue[0])
- else:
- record_to_create.append(
- CycleIssue(
- project_id=project_id,
- workspace=cycle.workspace,
- created_by=request.user,
- updated_by=request.user,
- cycle=cycle,
- issue_id=issue,
- )
- )
-
- CycleIssue.objects.bulk_create(
- record_to_create,
- batch_size=10,
- ignore_conflicts=True,
+ cycle_issues = list(
+ CycleIssue.objects.filter(
+ ~Q(cycle_id=cycle_id), issue_id__in=issues
+ )
)
- CycleIssue.objects.bulk_update(
- records_to_update,
- ["cycle"],
+ existing_issues = [
+ str(cycle_issue.issue_id) for cycle_issue in cycle_issues
+ ]
+ new_issues = list(set(issues) - set(existing_issues))
+
+ # New issues to create
+ created_records = CycleIssue.objects.bulk_create(
+ [
+ CycleIssue(
+ project_id=project_id,
+ workspace_id=cycle.workspace_id,
+ created_by_id=request.user.id,
+ updated_by_id=request.user.id,
+ cycle_id=cycle_id,
+ issue_id=issue,
+ )
+ for issue in new_issues
+ ],
batch_size=10,
)
+ # Updated Issues
+ updated_records = []
+ update_cycle_issue_activity = []
+ # Iterate over each cycle_issue in cycle_issues
+ for cycle_issue in cycle_issues:
+            # Record the update activity before the cycle_id is overwritten
+            update_cycle_issue_activity.append(
+                {
+                    "old_cycle_id": str(cycle_issue.cycle_id),
+                    "new_cycle_id": str(cycle_id),
+                    "issue_id": str(cycle_issue.issue_id),
+                }
+            )
+            # Move the cycle_issue to the new cycle
+            cycle_issue.cycle_id = cycle_id
+            # Add the modified cycle_issue to the records to bulk update
+            updated_records.append(cycle_issue)
+
+ # Update the cycle issues
+ CycleIssue.objects.bulk_update(updated_records, ["cycle_id"], batch_size=100)
# Capture Issue Activity
issue_activity.delay(
type="cycle.activity.created",
@@ -715,7 +895,7 @@ def create(self, request, slug, project_id, cycle_id):
{
"updated_cycle_issues": update_cycle_issue_activity,
"created_cycle_issues": serializers.serialize(
- "json", record_to_create
+ "json", created_records
),
}
),
@@ -723,16 +903,7 @@ def create(self, request, slug, project_id, cycle_id):
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
-
- # Return all Cycle Issues
- issues = self.get_queryset().values_list("issue_id", flat=True)
-
- return Response(
- IssueSerializer(
- Issue.objects.filter(pk__in=issues), many=True
- ).data,
- status=status.HTTP_200_OK,
- )
+ return Response({"message": "success"}, status=status.HTTP_201_CREATED)
def destroy(self, request, slug, project_id, cycle_id, issue_id):
cycle_issue = CycleIssue.objects.get(
@@ -776,6 +947,7 @@ def post(self, request, slug, project_id):
status=status.HTTP_400_BAD_REQUEST,
)
+ # Check if any cycle intersects in the given interval
cycles = Cycle.objects.filter(
Q(workspace__slug=slug)
& Q(project_id=project_id)
@@ -785,7 +957,6 @@ def post(self, request, slug, project_id):
| Q(start_date__gte=start_date, end_date__lte=end_date)
)
).exclude(pk=cycle_id)
-
if cycles.exists():
return Response(
{
@@ -909,29 +1080,6 @@ def post(self, request, slug, project_id, cycle_id):
),
)
)
- .annotate(
- total_estimates=Sum("issue_cycle__issue__estimate_point")
- )
- .annotate(
- completed_estimates=Sum(
- "issue_cycle__issue__estimate_point",
- filter=Q(
- issue_cycle__issue__state__group="completed",
- issue_cycle__issue__archived_at__isnull=True,
- issue_cycle__issue__is_draft=False,
- ),
- )
- )
- .annotate(
- started_estimates=Sum(
- "issue_cycle__issue__estimate_point",
- filter=Q(
- issue_cycle__issue__state__group="started",
- issue_cycle__issue__archived_at__isnull=True,
- issue_cycle__issue__is_draft=False,
- ),
- )
- )
)
# Pass the new_cycle queryset to burndown_plot
@@ -942,6 +1090,7 @@ def post(self, request, slug, project_id, cycle_id):
cycle_id=cycle_id,
)
+ # Get the assignee distribution
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
@@ -980,7 +1129,22 @@ def post(self, request, slug, project_id, cycle_id):
)
.order_by("display_name")
)
+ # assignee distribution serialized
+ assignee_distribution_data = [
+ {
+ "display_name": item["display_name"],
+ "assignee_id": (
+ str(item["assignee_id"]) if item["assignee_id"] else None
+ ),
+ "avatar": item["avatar"],
+ "total_issues": item["total_issues"],
+ "completed_issues": item["completed_issues"],
+ "pending_issues": item["pending_issues"],
+ }
+ for item in assignee_distribution
+ ]
+ # Get the label distribution
label_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
@@ -1023,7 +1187,9 @@ def post(self, request, slug, project_id, cycle_id):
assignee_distribution_data = [
{
"display_name": item["display_name"],
- "assignee_id": str(item["assignee_id"]) if item["assignee_id"] else None,
+ "assignee_id": (
+ str(item["assignee_id"]) if item["assignee_id"] else None
+ ),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
@@ -1032,11 +1198,14 @@ def post(self, request, slug, project_id, cycle_id):
for item in assignee_distribution
]
+        # Label distribution serialization
label_distribution_data = [
{
"label_name": item["label_name"],
"color": item["color"],
- "label_id": str(item["label_id"]) if item["label_id"] else None,
+ "label_id": (
+ str(item["label_id"]) if item["label_id"] else None
+ ),
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
@@ -1055,10 +1224,7 @@ def post(self, request, slug, project_id, cycle_id):
"started_issues": old_cycle.first().started_issues,
"unstarted_issues": old_cycle.first().unstarted_issues,
"backlog_issues": old_cycle.first().backlog_issues,
- "total_estimates": old_cycle.first().total_estimates,
- "completed_estimates": old_cycle.first().completed_estimates,
- "started_estimates": old_cycle.first().started_estimates,
- "distribution":{
+ "distribution": {
"labels": label_distribution_data,
"assignees": assignee_distribution_data,
"completion_chart": completion_chart,
diff --git a/apiserver/plane/app/views/dashboard.py b/apiserver/plane/app/views/dashboard.py
index 1366a2886a9..62ce0d910fe 100644
--- a/apiserver/plane/app/views/dashboard.py
+++ b/apiserver/plane/app/views/dashboard.py
@@ -15,6 +15,10 @@
Func,
Prefetch,
)
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce
from django.utils import timezone
# Third Party imports
@@ -54,6 +58,7 @@ def dashboard_overview_stats(self, request, slug):
pending_issues_count = Issue.issue_objects.filter(
~Q(state__group__in=["completed", "cancelled"]),
+ target_date__lt=timezone.now().date(),
project__project_projectmember__is_active=True,
project__project_projectmember__member=request.user,
workspace__slug=slug,
@@ -130,7 +135,32 @@ def dashboard_assigned_issues(self, request, slug):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
- .order_by("created_at")
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
)
# Priority Ordering
@@ -259,6 +289,32 @@ def dashboard_created_issues(self, request, slug):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
.order_by("created_at")
)
diff --git a/apiserver/plane/app/views/inbox.py b/apiserver/plane/app/views/inbox.py
index f76c74d9c1d..ed32a14febf 100644
--- a/apiserver/plane/app/views/inbox.py
+++ b/apiserver/plane/app/views/inbox.py
@@ -3,8 +3,12 @@
# Django import
from django.utils import timezone
-from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
+from django.db.models import Q, Count, OuterRef, Func, F, Prefetch, Exists
from django.core.serializers.json import DjangoJSONEncoder
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce
# Third party imports
from rest_framework import status
@@ -21,12 +25,14 @@
IssueLink,
IssueAttachment,
ProjectMember,
+ IssueReaction,
+ IssueSubscriber,
)
from plane.app.serializers import (
+ IssueCreateSerializer,
IssueSerializer,
InboxSerializer,
InboxIssueSerializer,
- IssueCreateSerializer,
IssueDetailSerializer,
)
from plane.utils.issue_filters import issue_filters
@@ -92,7 +98,7 @@ def get_queryset(self):
Issue.objects.filter(
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
- issue_inbox__inbox_id=self.kwargs.get("inbox_id")
+ issue_inbox__inbox_id=self.kwargs.get("inbox_id"),
)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
@@ -127,14 +133,75 @@ def get_queryset(self):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
).distinct()
def list(self, request, slug, project_id, inbox_id):
filters = issue_filters(request.query_params, "GET")
- issue_queryset = self.get_queryset().filter(**filters).order_by("issue_inbox__snoozed_till", "issue_inbox__status")
- issues_data = IssueSerializer(issue_queryset, expand=self.expand, many=True).data
+ issue_queryset = (
+ self.get_queryset()
+ .filter(**filters)
+ .order_by("issue_inbox__snoozed_till", "issue_inbox__status")
+ )
+ if self.expand:
+ issues = IssueSerializer(
+ issue_queryset, expand=self.expand, many=True
+ ).data
+ else:
+ issues = issue_queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
return Response(
- issues_data,
+ issues,
status=status.HTTP_200_OK,
)
@@ -199,8 +266,8 @@ def create(self, request, slug, project_id, inbox_id):
source=request.data.get("source", "in-app"),
)
- issue = (self.get_queryset().filter(pk=issue.id).first())
- serializer = IssueSerializer(issue ,expand=self.expand)
+ issue = self.get_queryset().filter(pk=issue.id).first()
+ serializer = IssueSerializer(issue, expand=self.expand)
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, inbox_id, issue_id):
@@ -230,11 +297,7 @@ def partial_update(self, request, slug, project_id, inbox_id, issue_id):
issue_data = request.data.pop("issue", False)
if bool(issue_data):
- issue = Issue.objects.get(
- pk=inbox_issue.issue_id,
- workspace__slug=slug,
- project_id=project_id,
- )
+ issue = self.get_queryset().filter(pk=inbox_issue.issue_id).first()
# Only allow guests and viewers to edit name and description
if project_member.role <= 10:
# viewers and guests since only viewers and guests
@@ -320,20 +383,54 @@ def partial_update(self, request, slug, project_id, inbox_id, issue_id):
if state is not None:
issue.state = state
issue.save()
- issue = (self.get_queryset().filter(pk=issue_id).first())
- serializer = IssueSerializer(issue, expand=self.expand)
- return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(status=status.HTTP_204_NO_CONTENT)
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
else:
- issue = (self.get_queryset().filter(pk=issue_id).first())
- serializer = IssueSerializer(issue ,expand=self.expand)
+ issue = self.get_queryset().filter(pk=issue_id).first()
+ serializer = IssueSerializer(issue, expand=self.expand)
return Response(serializer.data, status=status.HTTP_200_OK)
def retrieve(self, request, slug, project_id, inbox_id, issue_id):
- issue = self.get_queryset().filter(pk=issue_id).first()
- serializer = IssueDetailSerializer(issue, expand=self.expand,)
+ issue = (
+ self.get_queryset()
+ .filter(pk=issue_id)
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related(
+ "issue", "actor"
+ ),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_attachment",
+ queryset=IssueAttachment.objects.select_related("issue"),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related("created_by"),
+ )
+ )
+ .annotate(
+ is_subscribed=Exists(
+ IssueSubscriber.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ issue_id=OuterRef("pk"),
+ subscriber=request.user,
+ )
+ )
+ )
+ ).first()
+ if issue is None:
+            return Response(
+                {"error": "Requested object was not found"},
+                status=status.HTTP_404_NOT_FOUND,
+            )
+
+ serializer = IssueDetailSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, inbox_id, issue_id):
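
The annotation block added above, and repeated throughout this diff, flattens label, assignee, and module relations into id arrays so list endpoints can skip nested serialization. A minimal sketch of the pattern, assuming PostgreSQL and the `Issue` model and manager used in this diff; the helper name is illustrative:

```python
# Sketch only: collect related ids into flat arrays with ArrayAgg, and use
# Coalesce so issues without labels or assignees yield [] instead of NULL.
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce

from plane.db.models import Issue  # model from this codebase


def issues_with_related_ids(project_id):  # illustrative helper, not in the diff
    return Issue.issue_objects.filter(project_id=project_id).annotate(
        label_ids=Coalesce(
            ArrayAgg(
                "labels__id",
                distinct=True,
                filter=~Q(labels__id__isnull=True),
            ),
            Value([], output_field=ArrayField(UUIDField())),
        ),
        assignee_ids=Coalesce(
            ArrayAgg(
                "assignees__id",
                distinct=True,
                filter=~Q(assignees__id__isnull=True),
            ),
            Value([], output_field=ArrayField(UUIDField())),
        ),
    )
```
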
diff --git a/apiserver/plane/app/views/integration/slack.py b/apiserver/plane/app/views/integration/slack.py
index 410e6b332c3..c22ee3e52bd 100644
--- a/apiserver/plane/app/views/integration/slack.py
+++ b/apiserver/plane/app/views/integration/slack.py
@@ -36,7 +36,10 @@ def get_queryset(self):
workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"),
)
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
)
def create(self, request, slug, project_id, workspace_integration_id):
diff --git a/apiserver/plane/app/views/issue.py b/apiserver/plane/app/views/issue.py
index edefade16e4..14e0b6a9aa9 100644
--- a/apiserver/plane/app/views/issue.py
+++ b/apiserver/plane/app/views/issue.py
@@ -4,7 +4,6 @@
from itertools import chain
# Django imports
-from django.db import models
from django.utils import timezone
from django.db.models import (
Prefetch,
@@ -12,19 +11,21 @@
Func,
F,
Q,
- Count,
Case,
Value,
CharField,
When,
Exists,
Max,
- IntegerField,
)
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.db import IntegrityError
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce
# Third Party imports
from rest_framework.response import Response
@@ -67,15 +68,11 @@
Label,
IssueLink,
IssueAttachment,
- State,
IssueSubscriber,
ProjectMember,
IssueReaction,
CommentReaction,
- ProjectDeployBoard,
- IssueVote,
IssueRelation,
- ProjectPublicMember,
)
from plane.bgtasks.issue_activites_task import issue_activity
from plane.utils.grouper import group_results
@@ -83,6 +80,192 @@
from collections import defaultdict
+class IssueListEndpoint(BaseAPIView):
+
+ permission_classes = [
+ ProjectEntityPermission,
+ ]
+
+ def get(self, request, slug, project_id):
+ issue_ids = request.GET.get("issues", False)
+
+ if not issue_ids:
+ return Response(
+ {"error": "Issues are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ issue_ids = [issue_id for issue_id in issue_ids.split(",") if issue_id != ""]
+
+ queryset = (
+ Issue.issue_objects.filter(
+ workspace__slug=slug, project_id=project_id, pk__in=issue_ids
+ )
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .select_related("workspace", "project", "state", "parent")
+ .prefetch_related("assignees", "labels", "issue_module__module")
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(
+ issue=OuterRef("id")
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(
+ parent=OuterRef("id")
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
+ ).distinct()
+
+ filters = issue_filters(request.query_params, "GET")
+
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = [
+ "backlog",
+ "unstarted",
+ "started",
+ "completed",
+ "cancelled",
+ ]
+
+ order_by_param = request.GET.get("order_by", "-created_at")
+
+ issue_queryset = queryset.filter(**filters)
+
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = (
+ priority_order
+ if order_by_param == "priority"
+ else priority_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[
+ When(priority=p, then=Value(i))
+ for i, p in enumerate(priority_order)
+ ],
+ output_field=CharField(),
+ )
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = (
+ state_order
+ if order_by_param in ["state__name", "state__group"]
+ else state_order[::-1]
+ )
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[
+ When(state__group=state_group, then=Value(i))
+ for i, state_group in enumerate(state_order)
+ ],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(
+ order_by_param[1::]
+ if order_by_param.startswith("-")
+ else order_by_param
+ )
+ ).order_by(
+ "-max_values"
+ if order_by_param.startswith("-")
+ else "max_values"
+ )
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
+
+ if self.fields or self.expand:
+ issues = IssueSerializer(
+                issue_queryset, many=True, fields=self.fields, expand=self.expand
+ ).data
+ else:
+ issues = issue_queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
+ return Response(issues, status=status.HTTP_200_OK)
+
+
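
The new `IssueListEndpoint` above sorts by priority through a synthetic `Case`/`When` column. A condensed sketch of that ordering step; the helper name is illustrative, and the `CharField` output field mirrors the handler above:

```python
# Sketch only: map each priority label to its index, then order by it.
from django.db.models import Case, CharField, Value, When

PRIORITY_ORDER = ["urgent", "high", "medium", "low", "none"]


def order_by_priority(issue_queryset, descending=False):
    order = PRIORITY_ORDER[::-1] if descending else PRIORITY_ORDER
    return issue_queryset.annotate(
        priority_order=Case(
            *[When(priority=p, then=Value(i)) for i, p in enumerate(order)],
            output_field=CharField(),
        )
    ).order_by("priority_order")
```
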
class IssueViewSet(WebhookMixin, BaseViewSet):
def get_serializer_class(self):
return (
@@ -115,12 +298,6 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
@@ -144,12 +321,40 @@ def get_queryset(self):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
).distinct()
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET")
+ order_by_param = request.GET.get("order_by", "-created_at")
+ issue_queryset = self.get_queryset().filter(**filters)
# Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [
@@ -160,10 +365,6 @@ def list(self, request, slug, project_id):
"cancelled",
]
- order_by_param = request.GET.get("order_by", "-created_at")
-
- issue_queryset = self.get_queryset().filter(**filters)
-
# Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority":
priority_order = (
@@ -224,9 +425,42 @@ def list(self, request, slug, project_id):
else:
issue_queryset = issue_queryset.order_by(order_by_param)
- issues = IssueSerializer(
- issue_queryset, many=True, fields=self.fields, expand=self.expand
- ).data
+        # Use the serializer only when fields or expand is requested; otherwise return raw values
+ if self.expand or self.fields:
+ issues = IssueSerializer(
+ issue_queryset,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data
+ else:
+ issues = issue_queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
@@ -259,28 +493,97 @@ def create(self, request, slug, project_id):
origin=request.META.get("HTTP_ORIGIN"),
)
issue = (
- self.get_queryset().filter(pk=serializer.data["id"]).first()
+ self.get_queryset()
+ .filter(pk=serializer.data["id"])
+ .values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
+ .first()
)
- serializer = IssueSerializer(issue)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(issue, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk=None):
- issue = self.get_queryset().filter(pk=pk).first()
- return Response(
- IssueDetailSerializer(
- issue, fields=self.fields, expand=self.expand
- ).data,
- status=status.HTTP_200_OK,
- )
+ issue = (
+ self.get_queryset()
+ .filter(pk=pk)
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related(
+ "issue", "actor"
+ ),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_attachment",
+ queryset=IssueAttachment.objects.select_related("issue"),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related("created_by"),
+ )
+ )
+ .annotate(
+ is_subscribed=Exists(
+ IssueSubscriber.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ issue_id=OuterRef("pk"),
+ subscriber=request.user,
+ )
+ )
+ )
+ ).first()
+ if not issue:
+ return Response(
+ {"error": "The required object does not exist."},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+
+ serializer = IssueDetailSerializer(issue, expand=self.expand)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, pk=None):
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
+ issue = self.get_queryset().filter(pk=pk).first()
+
+ if not issue:
+ return Response(
+ {"error": "Issue not found"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+
current_instance = json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
)
+
requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
serializer = IssueCreateSerializer(
issue, data=request.data, partial=True
@@ -299,18 +602,13 @@ def partial_update(self, request, slug, project_id, pk=None):
origin=request.META.get("HTTP_ORIGIN"),
)
issue = self.get_queryset().filter(pk=pk).first()
- return Response(
- IssueSerializer(issue).data, status=status.HTTP_200_OK
- )
+ return Response(status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
- current_instance = json.dumps(
- IssueSerializer(issue).data, cls=DjangoJSONEncoder
- )
issue.delete()
issue_activity.delay(
type="issue.activity.deleted",
@@ -318,7 +616,7 @@ def destroy(self, request, slug, project_id, pk=None):
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
- current_instance=current_instance,
+ current_instance={},
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
@@ -326,6 +624,7 @@ def destroy(self, request, slug, project_id, pk=None):
return Response(status=status.HTTP_204_NO_CONTENT)
+# TODO: deprecated; remove once confirmed
class UserWorkSpaceIssues(BaseAPIView):
@method_decorator(gzip_page)
def get(self, request, slug):
@@ -380,12 +679,6 @@ def get(self, request, slug):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
.filter(**filters)
).distinct()
@@ -470,6 +763,7 @@ def get(self, request, slug):
return Response(issues, status=status.HTTP_200_OK)
+# TODO: deprecated; remove once confirmed
class WorkSpaceIssuesEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
@@ -479,7 +773,10 @@ class WorkSpaceIssuesEndpoint(BaseAPIView):
def get(self, request, slug):
issues = (
Issue.issue_objects.filter(workspace__slug=slug)
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.order_by("-created_at")
)
serializer = IssueSerializer(issues, many=True)
@@ -502,6 +799,7 @@ def get(self, request, slug, project_id, issue_id):
.filter(
~Q(field__in=["comment", "vote", "reaction", "draft"]),
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
workspace__slug=slug,
)
.filter(**filters)
@@ -511,6 +809,7 @@ def get(self, request, slug, project_id, issue_id):
IssueComment.objects.filter(issue_id=issue_id)
.filter(
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
workspace__slug=slug,
)
.filter(**filters)
@@ -562,7 +861,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("project")
.select_related("workspace")
.select_related("issue")
@@ -724,7 +1026,10 @@ def get_queryset(self):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("project")
.select_related("workspace")
.select_related("parent")
@@ -772,39 +1077,56 @@ def get(self, request, slug, project_id, issue_id):
Issue.issue_objects.filter(
parent_id=issue_id, workspace__slug=slug
)
- .select_related("project")
- .select_related("workspace")
- .select_related("state")
- .select_related("parent")
- .prefetch_related("assignees")
- .prefetch_related("labels")
+ .select_related("workspace", "project", "state", "parent")
+ .prefetch_related("assignees", "labels", "issue_module__module")
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
- sub_issues_count=Issue.issue_objects.filter(
- parent=OuterRef("id")
- )
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ attachment_count=IssueAttachment.objects.filter(
+ issue=OuterRef("id")
+ )
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
+ sub_issues_count=Issue.issue_objects.filter(
+ parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
)
.annotate(state_group=F("state__group"))
)
@@ -814,13 +1136,36 @@ def get(self, request, slug, project_id, issue_id):
for sub_issue in sub_issues:
result[sub_issue.state_group].append(str(sub_issue.id))
- serializer = IssueSerializer(
- sub_issues,
- many=True,
+ sub_issues = sub_issues.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
)
return Response(
{
- "sub_issues": serializer.data,
+ "sub_issues": sub_issues,
"state_distribution": result,
},
status=status.HTTP_200_OK,
@@ -897,7 +1242,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.order_by("-created_at")
.distinct()
)
@@ -1085,7 +1433,7 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
- .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
@@ -1108,15 +1456,36 @@ def get_queryset(self):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
)
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
- fields = [
- field
- for field in request.GET.get("fields", "").split(",")
- if field
- ]
filters = issue_filters(request.query_params, "GET")
show_sub_issues = request.GET.get("show_sub_issues", "true")
@@ -1132,10 +1501,7 @@ def list(self, request, slug, project_id):
order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- )
+ issue_queryset = self.get_queryset().filter(**filters)
# Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority":
@@ -1202,20 +1568,114 @@ def list(self, request, slug, project_id):
if show_sub_issues == "true"
else issue_queryset.filter(parent__isnull=True)
)
-
- issues = IssueSerializer(
- issue_queryset, many=True, fields=fields if fields else None
- ).data
+ if self.expand or self.fields:
+ issues = IssueSerializer(
+ issue_queryset,
+ many=True,
+ fields=self.fields,
+ ).data
+ else:
+ issues = issue_queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
return Response(issues, status=status.HTTP_200_OK)
def retrieve(self, request, slug, project_id, pk=None):
- issue = self.get_queryset().filter(pk=pk).first()
- return Response(
- IssueDetailSerializer(
- issue, fields=self.fields, expand=self.expand
- ).data,
- status=status.HTTP_200_OK,
+ issue = (
+ self.get_queryset()
+ .filter(pk=pk)
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related(
+ "issue", "actor"
+ ),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_attachment",
+ queryset=IssueAttachment.objects.select_related("issue"),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related("created_by"),
+ )
+ )
+ .annotate(
+ is_subscribed=Exists(
+ IssueSubscriber.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ issue_id=OuterRef("pk"),
+ subscriber=request.user,
+ )
+ )
+ )
+ ).first()
+ if not issue:
+ return Response(
+ {"error": "The required object does not exist."},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ serializer = IssueDetailSerializer(issue, expand=self.expand)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ def archive(self, request, slug, project_id, pk=None):
+ issue = Issue.issue_objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ pk=pk,
+ )
+ if issue.state.group not in ["completed", "cancelled"]:
+ return Response(
+                {"error": "Only issues in the completed or cancelled state group can be archived"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=json.dumps({"archived_at": str(timezone.now().date()), "automation": False}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=json.dumps(
+ IssueSerializer(issue).data, cls=DjangoJSONEncoder
+ ),
+ epoch=int(timezone.now().timestamp()),
+ notification=True,
+ origin=request.META.get("HTTP_ORIGIN"),
)
+ issue.archived_at = timezone.now().date()
+ issue.save()
+
+ return Response({"archived_at": str(issue.archived_at)}, status=status.HTTP_200_OK)
+
def unarchive(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(
@@ -1240,7 +1700,7 @@ def unarchive(self, request, slug, project_id, pk=None):
issue.archived_at = None
issue.save()
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
+ return Response(status=status.HTTP_204_NO_CONTENT)
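
The retrieve handlers in this diff attach an `is_subscribed` flag through an `EXISTS` subquery against `IssueSubscriber`. A minimal sketch using the same model names; the standalone helper is illustrative:

```python
# Sketch only: EXISTS subquery correlated on the outer issue's primary key.
from django.db.models import Exists, OuterRef

from plane.db.models import IssueSubscriber  # model from this codebase


def annotate_is_subscribed(issue_queryset, user, slug, project_id):
    return issue_queryset.annotate(
        is_subscribed=Exists(
            IssueSubscriber.objects.filter(
                workspace__slug=slug,
                project_id=project_id,
                issue_id=OuterRef("pk"),
                subscriber=user,
            )
        )
    )
```
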
class IssueSubscriberViewSet(BaseViewSet):
@@ -1276,7 +1736,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.order_by("-created_at")
.distinct()
)
@@ -1360,7 +1823,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.order_by("-created_at")
.distinct()
)
@@ -1429,7 +1895,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(comment_id=self.kwargs.get("comment_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.order_by("-created_at")
.distinct()
)
@@ -1499,7 +1968,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("project")
.select_related("workspace")
.select_related("issue")
@@ -1580,15 +2052,17 @@ def create(self, request, slug, project_id, issue_id):
issue_relation = IssueRelation.objects.bulk_create(
[
IssueRelation(
- issue_id=issue
- if relation_type == "blocking"
- else issue_id,
- related_issue_id=issue_id
- if relation_type == "blocking"
- else issue,
- relation_type="blocked_by"
- if relation_type == "blocking"
- else relation_type,
+ issue_id=(
+ issue if relation_type == "blocking" else issue_id
+ ),
+ related_issue_id=(
+ issue_id if relation_type == "blocking" else issue
+ ),
+ relation_type=(
+ "blocked_by"
+ if relation_type == "blocking"
+ else relation_type
+ ),
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,
@@ -1669,19 +2143,11 @@ class IssueDraftViewSet(BaseViewSet):
def get_queryset(self):
return (
- Issue.objects.filter(
- project_id=self.kwargs.get("project_id")
- )
+ Issue.objects.filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(is_draft=True)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
- )
- )
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
@@ -1705,6 +2171,32 @@ def get_queryset(self):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
).distinct()
@method_decorator(gzip_page)
@@ -1728,10 +2220,7 @@ def list(self, request, slug, project_id):
order_by_param = request.GET.get("order_by", "-created_at")
- issue_queryset = (
- self.get_queryset()
- .filter(**filters)
- )
+ issue_queryset = self.get_queryset().filter(**filters)
# Priority Ordering
if order_by_param == "priority" or order_by_param == "-priority":
@@ -1793,9 +2282,42 @@ def list(self, request, slug, project_id):
else:
issue_queryset = issue_queryset.order_by(order_by_param)
- issues = IssueSerializer(
- issue_queryset, many=True, fields=fields if fields else None
- ).data
+        # Use the serializer only when fields or expand is requested; otherwise return raw values
+ if self.expand or self.fields:
+ issues = IssueSerializer(
+ issue_queryset,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ ).data
+ else:
+ issues = issue_queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
@@ -1830,24 +2352,24 @@ def create(self, request, slug, project_id):
issue = (
self.get_queryset().filter(pk=serializer.data["id"]).first()
)
- return Response(IssueSerializer(issue).data, status=status.HTTP_201_CREATED)
+ return Response(
+ IssueSerializer(issue).data, status=status.HTTP_201_CREATED
+ )
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, slug, project_id, pk):
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk
- )
- serializer = IssueSerializer(issue, data=request.data, partial=True)
+ issue = self.get_queryset().filter(pk=pk).first()
+
+ if not issue:
+ return Response(
+ {"error": "Issue does not exist"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+
+ serializer = IssueCreateSerializer(issue, data=request.data, partial=True)
if serializer.is_valid():
- if request.data.get(
- "is_draft"
- ) is not None and not request.data.get("is_draft"):
- serializer.save(
- created_at=timezone.now(), updated_at=timezone.now()
- )
- else:
- serializer.save()
+ serializer.save()
issue_activity.delay(
type="issue_draft.activity.updated",
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
@@ -1862,25 +2384,57 @@ def partial_update(self, request, slug, project_id, pk):
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
- return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(status=status.HTTP_204_NO_CONTENT)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk=None):
- issue = self.get_queryset().filter(pk=pk).first()
- return Response(
- IssueSerializer(
- issue, fields=self.fields, expand=self.expand
- ).data,
- status=status.HTTP_200_OK,
- )
+ issue = (
+ self.get_queryset()
+ .filter(pk=pk)
+ .prefetch_related(
+ Prefetch(
+ "issue_reactions",
+ queryset=IssueReaction.objects.select_related(
+ "issue", "actor"
+ ),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_attachment",
+ queryset=IssueAttachment.objects.select_related("issue"),
+ )
+ )
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related("created_by"),
+ )
+ )
+ .annotate(
+ is_subscribed=Exists(
+ IssueSubscriber.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ issue_id=OuterRef("pk"),
+ subscriber=request.user,
+ )
+ )
+ )
+ ).first()
+
+ if not issue:
+ return Response(
+ {"error": "The required object does not exist."},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ serializer = IssueDetailSerializer(issue, expand=self.expand)
+ return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
- current_instance = json.dumps(
- IssueSerializer(issue).data, cls=DjangoJSONEncoder
- )
issue.delete()
issue_activity.delay(
type="issue_draft.activity.deleted",
@@ -1888,7 +2442,7 @@ def destroy(self, request, slug, project_id, pk=None):
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
- current_instance=current_instance,
+ current_instance={},
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
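
The recurring access-control change in this file, and across the diff, is the extra `is_active=True` condition on project membership, so data from projects a user has left or been removed from no longer shows up. A minimal sketch of that filter, assuming the `Issue` manager used above; the helper name is illustrative:

```python
# Sketch only: restrict to projects where the requester is an active member.
from plane.db.models import Issue  # model from this codebase


def issues_visible_to(user, slug):
    return Issue.issue_objects.filter(
        workspace__slug=slug,
        project__project_projectmember__member=user,
        project__project_projectmember__is_active=True,
    )
```
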
diff --git a/apiserver/plane/app/views/module.py b/apiserver/plane/app/views/module.py
index 4792a1f7996..3b52db64f9e 100644
--- a/apiserver/plane/app/views/module.py
+++ b/apiserver/plane/app/views/module.py
@@ -4,11 +4,12 @@
# Django Imports
from django.utils import timezone
from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
-from django.core import serializers
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
-from django.core.serializers.json import DjangoJSONEncoder
-
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce
# Third party imports
from rest_framework.response import Response
@@ -24,6 +25,7 @@
ModuleFavoriteSerializer,
IssueSerializer,
ModuleUserPropertiesSerializer,
+ ModuleDetailSerializer,
)
from plane.app.permissions import (
ProjectEntityPermission,
@@ -38,11 +40,9 @@
ModuleFavorite,
IssueLink,
IssueAttachment,
- IssueSubscriber,
ModuleUserProperties,
)
from plane.bgtasks.issue_activites_task import issue_activity
-from plane.utils.grouper import group_results
from plane.utils.issue_filters import issue_filters
from plane.utils.analytics_plot import burndown_plot
@@ -62,7 +62,7 @@ def get_serializer_class(self):
)
def get_queryset(self):
- subquery = ModuleFavorite.objects.filter(
+ favorite_subquery = ModuleFavorite.objects.filter(
user=self.request.user,
module_id=OuterRef("pk"),
project_id=self.kwargs.get("project_id"),
@@ -73,7 +73,7 @@ def get_queryset(self):
.get_queryset()
.filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug"))
- .annotate(is_favorite=Exists(subquery))
+ .annotate(is_favorite=Exists(favorite_subquery))
.select_related("project")
.select_related("workspace")
.select_related("lead")
@@ -145,6 +145,16 @@ def get_queryset(self):
),
)
)
+ .annotate(
+ member_ids=Coalesce(
+ ArrayAgg(
+ "members__id",
+ distinct=True,
+ filter=~Q(members__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ )
+ )
.order_by("-is_favorite", "-created_at")
)
@@ -157,25 +167,84 @@ def create(self, request, slug, project_id):
if serializer.is_valid():
serializer.save()
- module = Module.objects.get(pk=serializer.data["id"])
- serializer = ModuleSerializer(module)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ module = (
+ self.get_queryset()
+ .filter(pk=serializer.data["id"])
+ .values( # Required fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # Model fields
+ "name",
+ "description",
+ "description_text",
+ "description_html",
+ "start_date",
+ "target_date",
+ "status",
+ "lead_id",
+ "member_ids",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ # computed fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "created_at",
+ "updated_at",
+ )
+ ).first()
+ return Response(module, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
queryset = self.get_queryset()
- fields = [
- field
- for field in request.GET.get("fields", "").split(",")
- if field
- ]
- modules = ModuleSerializer(
- queryset, many=True, fields=fields if fields else None
- ).data
+ if self.fields:
+ modules = ModuleSerializer(
+ queryset,
+ many=True,
+ fields=self.fields,
+ ).data
+ else:
+ modules = queryset.values( # Required fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # Model fields
+ "name",
+ "description",
+ "description_text",
+ "description_html",
+ "start_date",
+ "target_date",
+ "status",
+ "lead_id",
+ "member_ids",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ # computed fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "created_at",
+ "updated_at",
+ )
return Response(modules, status=status.HTTP_200_OK)
def retrieve(self, request, slug, project_id, pk):
- queryset = self.get_queryset().get(pk=pk)
+ queryset = self.get_queryset().filter(pk=pk)
assignee_distribution = (
Issue.objects.filter(
@@ -269,16 +338,16 @@ def retrieve(self, request, slug, project_id, pk):
.order_by("label_name")
)
- data = ModuleSerializer(queryset).data
+ data = ModuleDetailSerializer(queryset.first()).data
data["distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
- if queryset.start_date and queryset.target_date:
+ if queryset.first().start_date and queryset.first().target_date:
data["distribution"]["completion_chart"] = burndown_plot(
- queryset=queryset,
+ queryset=queryset.first(),
slug=slug,
project_id=project_id,
module_id=pk,
@@ -289,6 +358,47 @@ def retrieve(self, request, slug, project_id, pk):
status=status.HTTP_200_OK,
)
+ def partial_update(self, request, slug, project_id, pk):
+ queryset = self.get_queryset().filter(pk=pk)
+ serializer = ModuleWriteSerializer(
+ queryset.first(), data=request.data, partial=True
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ module = queryset.values(
+ # Required fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # Model fields
+ "name",
+ "description",
+ "description_text",
+ "description_html",
+ "start_date",
+ "target_date",
+ "status",
+ "lead_id",
+ "member_ids",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ # computed fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "created_at",
+ "updated_at",
+ ).first()
+ return Response(module, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
def destroy(self, request, slug, project_id, pk):
module = Module.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
@@ -331,17 +441,15 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
ProjectEntityPermission,
]
-
def get_queryset(self):
return (
Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
- issue_module__module_id=self.kwargs.get("module_id")
+ issue_module__module_id=self.kwargs.get("module_id"),
)
.select_related("workspace", "project", "state", "parent")
- .prefetch_related("labels", "assignees")
- .prefetch_related('issue_module__module')
+ .prefetch_related("assignees", "labels", "issue_module__module")
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
@@ -365,6 +473,32 @@ def get_queryset(self):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
).distinct()
@method_decorator(gzip_page)
@@ -376,15 +510,44 @@ def list(self, request, slug, project_id, module_id):
]
filters = issue_filters(request.query_params, "GET")
issue_queryset = self.get_queryset().filter(**filters)
- serializer = IssueSerializer(
- issue_queryset, many=True, fields=fields if fields else None
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
+ if self.fields or self.expand:
+ issues = IssueSerializer(
+                issue_queryset, many=True, fields=self.fields, expand=self.expand
+ ).data
+ else:
+ issues = issue_queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
+ return Response(issues, status=status.HTTP_200_OK)
# create multiple issues inside a module
def create_module_issues(self, request, slug, project_id, module_id):
issues = request.data.get("issues", [])
- if not len(issues):
+ if not issues:
return Response(
{"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
@@ -420,15 +583,12 @@ def create_module_issues(self, request, slug, project_id, module_id):
)
for issue in issues
]
- issues = (self.get_queryset().filter(pk__in=issues))
- serializer = IssueSerializer(issues , many=True)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
-
+ return Response({"message": "success"}, status=status.HTTP_201_CREATED)
# create multiple module inside an issue
def create_issue_modules(self, request, slug, project_id, issue_id):
modules = request.data.get("modules", [])
- if not len(modules):
+ if not modules:
return Response(
{"error": "Modules are required"},
status=status.HTTP_400_BAD_REQUEST,
@@ -466,10 +626,7 @@ def create_issue_modules(self, request, slug, project_id, issue_id):
for module in modules
]
- issue = (self.get_queryset().filter(pk=issue_id).first())
- serializer = IssueSerializer(issue)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
-
+ return Response({"message": "success"}, status=status.HTTP_201_CREATED)
def destroy(self, request, slug, project_id, module_id, issue_id):
module_issue = ModuleIssue.objects.get(
@@ -484,7 +641,9 @@ def destroy(self, request, slug, project_id, module_id, issue_id):
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
- current_instance=json.dumps({"module_name": module_issue.module.name}),
+ current_instance=json.dumps(
+ {"module_name": module_issue.module.name}
+ ),
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
@@ -514,7 +673,10 @@ def get_queryset(self):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(module_id=self.kwargs.get("module_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.order_by("-created_at")
.distinct()
)
diff --git a/apiserver/plane/app/views/page.py b/apiserver/plane/app/views/page.py
index 1d8ff1fbb15..7ecf22fa847 100644
--- a/apiserver/plane/app/views/page.py
+++ b/apiserver/plane/app/views/page.py
@@ -60,7 +60,10 @@ def get_queryset(self):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.filter(parent__isnull=True)
.filter(Q(owned_by=self.request.user) | Q(access=0))
.select_related("project")
diff --git a/apiserver/plane/app/views/project.py b/apiserver/plane/app/views/project.py
index 5d2f9567305..6f9b2618e19 100644
--- a/apiserver/plane/app/views/project.py
+++ b/apiserver/plane/app/views/project.py
@@ -77,6 +77,12 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
]
def get_queryset(self):
+ sort_order = ProjectMember.objects.filter(
+ member=self.request.user,
+ project_id=OuterRef("pk"),
+ workspace__slug=self.kwargs.get("slug"),
+ is_active=True,
+ ).values("sort_order")
return self.filter_queryset(
super()
.get_queryset()
@@ -147,6 +153,7 @@ def get_queryset(self):
)
)
)
+ .annotate(sort_order=Subquery(sort_order))
.prefetch_related(
Prefetch(
"project_projectmember",
@@ -166,16 +173,8 @@ def list(self, request, slug):
for field in request.GET.get("fields", "").split(",")
if field
]
-
- sort_order_query = ProjectMember.objects.filter(
- member=request.user,
- project_id=OuterRef("pk"),
- workspace__slug=self.kwargs.get("slug"),
- is_active=True,
- ).values("sort_order")
projects = (
self.get_queryset()
- .annotate(sort_order=Subquery(sort_order_query))
.order_by("sort_order", "name")
)
if request.GET.get("per_page", False) and request.GET.get(
@@ -204,7 +203,7 @@ def create(self, request, slug):
serializer.save()
# Add the user as Administrator to the project
- project_member = ProjectMember.objects.create(
+ _ = ProjectMember.objects.create(
project_id=serializer.data["id"],
member=request.user,
role=20,
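
The `sort_order` annotation moved into `ProjectViewSet.get_queryset` relies on a correlated subquery against `ProjectMember`, so each user sees projects in their own saved order. A rough sketch of the same idea; the standalone helper is illustrative:

```python
# Sketch only: per-user project ordering via a correlated Subquery.
from django.db.models import OuterRef, Subquery

from plane.db.models import Project, ProjectMember  # models from this codebase


def projects_in_member_order(user, slug):
    sort_order = ProjectMember.objects.filter(
        member=user,
        project_id=OuterRef("pk"),
        workspace__slug=slug,
        is_active=True,
    ).values("sort_order")
    return (
        Project.objects.filter(workspace__slug=slug)
        .annotate(sort_order=Subquery(sort_order))
        .order_by("sort_order", "name")
    )
```
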
diff --git a/apiserver/plane/app/views/search.py b/apiserver/plane/app/views/search.py
index ccef3d18f12..a2ed1c015ad 100644
--- a/apiserver/plane/app/views/search.py
+++ b/apiserver/plane/app/views/search.py
@@ -48,8 +48,8 @@ def filter_projects(self, query, slug, project_id, workspace_search):
return (
Project.objects.filter(
q,
- Q(project_projectmember__member=self.request.user)
- | Q(network=2),
+ project_projectmember__member=self.request.user,
+ project_projectmember__is_active=True,
workspace__slug=slug,
)
.distinct()
@@ -71,6 +71,7 @@ def filter_issues(self, query, slug, project_id, workspace_search):
issues = Issue.issue_objects.filter(
q,
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
workspace__slug=slug,
)
@@ -95,6 +96,7 @@ def filter_cycles(self, query, slug, project_id, workspace_search):
cycles = Cycle.objects.filter(
q,
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
workspace__slug=slug,
)
@@ -118,6 +120,7 @@ def filter_modules(self, query, slug, project_id, workspace_search):
modules = Module.objects.filter(
q,
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
workspace__slug=slug,
)
@@ -141,6 +144,7 @@ def filter_pages(self, query, slug, project_id, workspace_search):
pages = Page.objects.filter(
q,
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
workspace__slug=slug,
)
@@ -164,6 +168,7 @@ def filter_views(self, query, slug, project_id, workspace_search):
issue_views = IssueView.objects.filter(
q,
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
workspace__slug=slug,
)
@@ -236,6 +241,7 @@ def get(self, request, slug, project_id):
issues = Issue.issue_objects.filter(
workspace__slug=slug,
project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
)
if workspace_search == "false":
diff --git a/apiserver/plane/app/views/state.py b/apiserver/plane/app/views/state.py
index 242061e1878..34b3d1dcc01 100644
--- a/apiserver/plane/app/views/state.py
+++ b/apiserver/plane/app/views/state.py
@@ -31,7 +31,10 @@ def get_queryset(self):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.filter(~Q(name="Triage"))
.select_related("project")
.select_related("workspace")
diff --git a/apiserver/plane/app/views/view.py b/apiserver/plane/app/views/view.py
index 27f31f7a9ba..ade445fae80 100644
--- a/apiserver/plane/app/views/view.py
+++ b/apiserver/plane/app/views/view.py
@@ -1,6 +1,6 @@
# Django imports
from django.db.models import (
- Prefetch,
+ Q,
OuterRef,
Func,
F,
@@ -13,16 +13,21 @@
)
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
-from django.db.models import Prefetch, OuterRef, Exists
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce
# Third party imports
from rest_framework.response import Response
from rest_framework import status
# Module imports
-from . import BaseViewSet, BaseAPIView
+from . import BaseViewSet
from plane.app.serializers import (
- GlobalViewSerializer,
IssueViewSerializer,
IssueSerializer,
IssueViewFavoriteSerializer,
@@ -30,22 +35,16 @@
from plane.app.permissions import (
WorkspaceEntityPermission,
ProjectEntityPermission,
- WorkspaceViewerPermission,
- ProjectLitePermission,
)
from plane.db.models import (
Workspace,
- GlobalView,
IssueView,
Issue,
IssueViewFavorite,
- IssueReaction,
IssueLink,
IssueAttachment,
- IssueSubscriber,
)
from plane.utils.issue_filters import issue_filters
-from plane.utils.grouper import group_results
class GlobalViewViewSet(BaseViewSet):
@@ -87,13 +86,60 @@ def get_queryset(self):
.values("count")
)
.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
- .prefetch_related(
- Prefetch(
- "issue_reactions",
- queryset=IssueReaction.objects.select_related("actor"),
+ .annotate(cycle_id=F("issue_cycle__cycle_id"))
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=IssueAttachment.objects.filter(
+ issue=OuterRef("id")
)
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(
+ parent=OuterRef("id")
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
)
)
@@ -121,30 +167,7 @@ def list(self, request, slug):
issue_queryset = (
self.get_queryset()
.filter(**filters)
- .filter(project__project_projectmember__member=self.request.user)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
- .annotate(
- link_count=IssueLink.objects.filter(issue=OuterRef("id"))
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- attachment_count=IssueAttachment.objects.filter(
- issue=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
- .annotate(
- sub_issues_count=Issue.issue_objects.filter(
- parent=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
- )
)
# Priority Ordering
@@ -207,10 +230,39 @@ def list(self, request, slug):
else:
issue_queryset = issue_queryset.order_by(order_by_param)
- serializer = IssueSerializer(
- issue_queryset, many=True, fields=fields if fields else None
- )
- return Response(serializer.data, status=status.HTTP_200_OK)
+ if self.fields:
+ issues = IssueSerializer(
+ issue_queryset, many=True, fields=self.fields
+ ).data
+ else:
+ issues = issue_queryset.values(
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ )
+ return Response(issues, status=status.HTTP_200_OK)
class IssueViewViewSet(BaseViewSet):
@@ -235,7 +287,10 @@ def get_queryset(self):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
.select_related("project")
.select_related("workspace")
.annotate(is_favorite=Exists(subquery))
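
Several list handlers in this diff now return `queryset.values(...)` unless `fields` or `expand` is requested, trading serializer flexibility for a lighter response. A hedged sketch of that branch; `shape_issue_response` and the abridged column tuple are illustrative, not part of the codebase:

```python
# Sketch only: serializer path for fields/expand, raw values() otherwise.
from plane.app.serializers import IssueSerializer  # serializer from this codebase

# Abridged; the handlers above list the full column set.
ISSUE_VALUES = (
    "id", "name", "state_id", "priority", "start_date", "target_date",
    "sequence_id", "project_id", "parent_id", "cycle_id",
    "module_ids", "label_ids", "assignee_ids",
    "sub_issues_count", "attachment_count", "link_count",
    "created_at", "updated_at", "is_draft", "archived_at",
)


def shape_issue_response(view, issue_queryset):
    # `view` is assumed to expose .fields and .expand like the viewsets here.
    if view.expand or view.fields:
        return IssueSerializer(
            issue_queryset, many=True, fields=view.fields, expand=view.expand
        ).data
    return issue_queryset.values(*ISSUE_VALUES)
```

The trade-off: `.values()` skips per-row serializer instantiation, while the serializer path keeps dynamic field selection and expansion.
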
diff --git a/apiserver/plane/app/views/workspace.py b/apiserver/plane/app/views/workspace.py
index f4d3dbbb5e0..47de86a1c17 100644
--- a/apiserver/plane/app/views/workspace.py
+++ b/apiserver/plane/app/views/workspace.py
@@ -22,9 +22,14 @@
When,
Max,
IntegerField,
+ Sum,
)
from django.db.models.functions import ExtractWeek, Cast, ExtractDay
from django.db.models.fields import DateField
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+from django.db.models import Value, UUIDField
+from django.db.models.functions import Coalesce
# Third party modules
from rest_framework import status
@@ -73,6 +78,9 @@
WorkspaceUserProperties,
Estimate,
EstimatePoint,
+ Module,
+ ModuleLink,
+ Cycle,
)
from plane.app.permissions import (
WorkSpaceBasePermission,
@@ -85,6 +93,12 @@
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.utils.issue_filters import issue_filters
from plane.bgtasks.event_tracking_task import workspace_invite_event
+from plane.app.serializers.module import (
+ ModuleSerializer,
+)
+from plane.app.serializers.cycle import (
+ CycleSerializer,
+)
class WorkSpaceViewSet(BaseViewSet):
@@ -546,7 +560,6 @@ def get_queryset(self):
.get_queryset()
.filter(
workspace__slug=self.kwargs.get("slug"),
- member__is_bot=False,
is_active=True,
)
.select_related("workspace", "workspace__owner")
@@ -754,7 +767,6 @@ def get(self, request, slug):
project_ids = (
ProjectMember.objects.filter(
member=request.user,
- member__is_bot=False,
is_active=True,
)
.values_list("project_id", flat=True)
@@ -764,7 +776,6 @@ def get(self, request, slug):
# Get all the project members in which the user is involved
project_members = ProjectMember.objects.filter(
workspace__slug=slug,
- member__is_bot=False,
project_id__in=project_ids,
is_active=True,
).select_related("project", "member", "workspace")
@@ -1075,6 +1086,7 @@ def get(self, request, slug, user_id):
workspace__slug=slug,
assignees__in=[user_id],
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True
)
.filter(**filters)
.annotate(state_group=F("state__group"))
@@ -1090,6 +1102,7 @@ def get(self, request, slug, user_id):
workspace__slug=slug,
assignees__in=[user_id],
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True
)
.filter(**filters)
.values("priority")
@@ -1112,6 +1125,7 @@ def get(self, request, slug, user_id):
Issue.issue_objects.filter(
workspace__slug=slug,
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True,
created_by_id=user_id,
)
.filter(**filters)
@@ -1123,6 +1137,7 @@ def get(self, request, slug, user_id):
workspace__slug=slug,
assignees__in=[user_id],
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True,
)
.filter(**filters)
.count()
@@ -1134,6 +1149,7 @@ def get(self, request, slug, user_id):
workspace__slug=slug,
assignees__in=[user_id],
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True,
)
.filter(**filters)
.count()
@@ -1145,6 +1161,7 @@ def get(self, request, slug, user_id):
assignees__in=[user_id],
state__group="completed",
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True
)
.filter(**filters)
.count()
@@ -1155,6 +1172,7 @@ def get(self, request, slug, user_id):
workspace__slug=slug,
subscriber_id=user_id,
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True
)
.filter(**filters)
.count()
@@ -1204,6 +1222,7 @@ def get(self, request, slug, user_id):
~Q(field__in=["comment", "vote", "reaction", "draft"]),
workspace__slug=slug,
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True,
actor=user_id,
).select_related("actor", "workspace", "issue", "project")
@@ -1234,6 +1253,7 @@ def get(self, request, slug, user_id):
Project.objects.filter(
workspace__slug=slug,
project_projectmember__member=request.user,
+ project_projectmember__is_active=True,
)
.annotate(
created_issues=Count(
@@ -1343,6 +1363,7 @@ def get(self, request, slug, user_id):
| Q(issue_subscribers__subscriber_id=user_id),
workspace__slug=slug,
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True
)
.filter(**filters)
.select_related("workspace", "project", "state", "parent")
@@ -1370,6 +1391,32 @@ def get(self, request, slug, user_id):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
+ .annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=~Q(labels__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=~Q(assignees__id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ module_ids=Coalesce(
+ ArrayAgg(
+ "issue_module__module_id",
+ distinct=True,
+ filter=~Q(issue_module__module_id__isnull=True),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ )
.order_by("created_at")
).distinct()
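The `Coalesce(ArrayAgg(...), Value([]))` annotations added above follow a reusable idiom: aggregate related IDs into a Postgres array, de-duplicate them, drop the `NULL` produced by the LEFT JOIN, and fall back to an empty list when there are no relations. A self-contained sketch with hypothetical `post`/`tags` names (not Plane models):

```python
# Hypothetical relation names; illustrates the ArrayAgg/Coalesce idiom above.
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce


def with_tag_ids(post_queryset):
    """Annotate each post with a de-duplicated array of its tag ids."""
    return post_queryset.annotate(
        tag_ids=Coalesce(
            ArrayAgg(
                "tags__id",
                distinct=True,                     # collapse join duplicates
                filter=~Q(tags__id__isnull=True),  # ignore the LEFT JOIN NULL row
            ),
            Value([], output_field=ArrayField(UUIDField())),  # never return NULL
        )
    )
```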
@@ -1448,6 +1495,7 @@ def get(self, request, slug):
labels = Label.objects.filter(
workspace__slug=slug,
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True
)
serializer = LabelSerializer(labels, many=True).data
return Response(serializer, status=status.HTTP_200_OK)
@@ -1462,6 +1510,7 @@ def get(self, request, slug):
states = State.objects.filter(
workspace__slug=slug,
project__project_projectmember__member=request.user,
+ project__project_projectmember__is_active=True
)
serializer = StateSerializer(states, many=True).data
return Response(serializer, status=status.HTTP_200_OK)
@@ -1490,6 +1539,192 @@ def get(self, request, slug):
return Response(serializer.data, status=status.HTTP_200_OK)
+class WorkspaceModulesEndpoint(BaseAPIView):
+ permission_classes = [
+ WorkspaceViewerPermission,
+ ]
+
+ def get(self, request, slug):
+ modules = (
+ Module.objects.filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("lead")
+ .prefetch_related("members")
+ .prefetch_related(
+ Prefetch(
+ "link_module",
+ queryset=ModuleLink.objects.select_related(
+ "module", "created_by"
+ ),
+ )
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_module",
+ filter=Q(
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="completed",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="cancelled",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="started",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="unstarted",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="backlog",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ ),
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ serializer = ModuleSerializer(modules, many=True).data
+ return Response(serializer, status=status.HTTP_200_OK)
+
+
+class WorkspaceCyclesEndpoint(BaseAPIView):
+ permission_classes = [
+ WorkspaceViewerPermission,
+ ]
+
+ def get(self, request, slug):
+ cycles = (
+ Cycle.objects.filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("owned_by")
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ total_estimates=Sum("issue_cycle__issue__estimate_point")
+ )
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+ serializer = CycleSerializer(cycles, many=True).data
+ return Response(serializer, status=status.HTTP_200_OK)
+
+
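Both workspace endpoints above rely on the same conditional-aggregation idiom: a single query with several `Count(..., filter=Q(...))` annotations, one per state group. A stripped-down sketch of that idiom under hypothetical `sprint`/`tasks` names (not Plane's models):

```python
# Hypothetical relation names; shows the Count(..., filter=Q(...)) idiom above.
from django.db.models import Count, Q


def sprint_progress(sprint_queryset):
    """Annotate each sprint with per-state task counts in one query."""
    live = Q(tasks__archived_at__isnull=True, tasks__is_draft=False)
    return sprint_queryset.annotate(
        total_tasks=Count("tasks", filter=live),
        completed_tasks=Count("tasks", filter=live & Q(tasks__state_group="completed")),
        started_tasks=Count("tasks", filter=live & Q(tasks__state_group="started")),
    )
```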
class WorkspaceUserPropertiesEndpoint(BaseAPIView):
permission_classes = [
WorkspaceViewerPermission,
diff --git a/apiserver/plane/bgtasks/email_notification_task.py b/apiserver/plane/bgtasks/email_notification_task.py
index 9e9b348e197..2a98c6b3324 100644
--- a/apiserver/plane/bgtasks/email_notification_task.py
+++ b/apiserver/plane/bgtasks/email_notification_task.py
@@ -1,21 +1,33 @@
from datetime import datetime
from bs4 import BeautifulSoup
-
# Third party imports
from celery import shared_task
+from sentry_sdk import capture_exception
# Django imports
from django.utils import timezone
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
+from django.conf import settings
# Module imports
from plane.db.models import EmailNotificationLog, User, Issue
from plane.license.utils.instance_value import get_email_configuration
from plane.settings.redis import redis_instance
+# Acquire and release a Redis lock used to de-duplicate email notification tasks
+def acquire_lock(lock_id, expire_time=300):
+    """Attempt to acquire a lock with a specified expiration time."""
+    redis_client = redis_instance()
+ return redis_client.set(lock_id, 'true', nx=True, ex=expire_time)
+
+def release_lock(lock_id):
+ """Release a lock."""
+ redis_client = redis_instance()
+ redis_client.delete(lock_id)
+
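The two helpers above implement a plain Redis mutex (`SET key value NX EX ttl`): the set succeeds only if the key is absent, and the TTL guarantees a crashed worker cannot hold the lock forever. A hedged, self-contained sketch of the acquire/work/release pattern they enable — the deterministic key built from sorted IDs mirrors what `send_email_notification` does below, while `do_work` and the Redis connection details are placeholders:

```python
# Illustrative sketch only; assumes a reachable Redis and the redis-py client.
import redis

r = redis.Redis(host="localhost", port=6379, db=0)


def acquire_lock(lock_id, expire_time=300):
    # NX: only set if the key does not exist; EX: auto-expire after expire_time seconds.
    return r.set(lock_id, "true", nx=True, ex=expire_time)


def release_lock(lock_id):
    r.delete(lock_id)


def run_once(issue_id, receiver_id, notification_ids, do_work):
    # Sorting the ids makes duplicate deliveries map onto the same lock key.
    ids_str = "_".join(str(i) for i in sorted(notification_ids))
    lock_id = f"send_email_notif_{issue_id}_{receiver_id}_{ids_str}"
    if not acquire_lock(lock_id):
        return  # another worker already handled this exact payload
    try:
        do_work()
    finally:
        release_lock(lock_id)
```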
@shared_task
def stack_email_notification():
# get all email notifications
@@ -142,135 +154,155 @@ def process_html_content(content):
processed_content_list.append(processed_content)
return processed_content_list
+
@shared_task
def send_email_notification(
issue_id, notification_data, receiver_id, email_notification_ids
):
+ # Convert UUIDs to a sorted, concatenated string
+ sorted_ids = sorted(email_notification_ids)
+ ids_str = "_".join(str(id) for id in sorted_ids)
+ lock_id = f"send_email_notif_{issue_id}_{receiver_id}_{ids_str}"
+
+ # acquire the lock for sending emails
try:
- ri = redis_instance()
- base_api = (ri.get(str(issue_id)).decode())
- data = create_payload(notification_data=notification_data)
+ if acquire_lock(lock_id=lock_id):
+ # get the redis instance
+ ri = redis_instance()
+ base_api = (ri.get(str(issue_id)).decode())
+ data = create_payload(notification_data=notification_data)
- # Get email configurations
- (
- EMAIL_HOST,
- EMAIL_HOST_USER,
- EMAIL_HOST_PASSWORD,
- EMAIL_PORT,
- EMAIL_USE_TLS,
- EMAIL_FROM,
- ) = get_email_configuration()
+ # Get email configurations
+ (
+ EMAIL_HOST,
+ EMAIL_HOST_USER,
+ EMAIL_HOST_PASSWORD,
+ EMAIL_PORT,
+ EMAIL_USE_TLS,
+ EMAIL_FROM,
+ ) = get_email_configuration()
- receiver = User.objects.get(pk=receiver_id)
- issue = Issue.objects.get(pk=issue_id)
- template_data = []
- total_changes = 0
- comments = []
- actors_involved = []
- for actor_id, changes in data.items():
- actor = User.objects.get(pk=actor_id)
- total_changes = total_changes + len(changes)
- comment = changes.pop("comment", False)
- mention = changes.pop("mention", False)
- actors_involved.append(actor_id)
- if comment:
- comments.append(
- {
- "actor_comments": comment,
- "actor_detail": {
- "avatar_url": actor.avatar,
- "first_name": actor.first_name,
- "last_name": actor.last_name,
- },
- }
- )
- if mention:
- mention["new_value"] = process_html_content(mention.get("new_value"))
- mention["old_value"] = process_html_content(mention.get("old_value"))
- comments.append(
- {
- "actor_comments": mention,
- "actor_detail": {
- "avatar_url": actor.avatar,
- "first_name": actor.first_name,
- "last_name": actor.last_name,
- },
- }
- )
- activity_time = changes.pop("activity_time")
- # Parse the input string into a datetime object
- formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p")
+ receiver = User.objects.get(pk=receiver_id)
+ issue = Issue.objects.get(pk=issue_id)
+ template_data = []
+ total_changes = 0
+ comments = []
+ actors_involved = []
+ for actor_id, changes in data.items():
+ actor = User.objects.get(pk=actor_id)
+ total_changes = total_changes + len(changes)
+ comment = changes.pop("comment", False)
+ mention = changes.pop("mention", False)
+ actors_involved.append(actor_id)
+ if comment:
+ comments.append(
+ {
+ "actor_comments": comment,
+ "actor_detail": {
+ "avatar_url": actor.avatar,
+ "first_name": actor.first_name,
+ "last_name": actor.last_name,
+ },
+ }
+ )
+ if mention:
+ mention["new_value"] = process_html_content(mention.get("new_value"))
+ mention["old_value"] = process_html_content(mention.get("old_value"))
+ comments.append(
+ {
+ "actor_comments": mention,
+ "actor_detail": {
+ "avatar_url": actor.avatar,
+ "first_name": actor.first_name,
+ "last_name": actor.last_name,
+ },
+ }
+ )
+ activity_time = changes.pop("activity_time")
+ # Parse the input string into a datetime object
+ formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p")
- if changes:
- template_data.append(
- {
- "actor_detail": {
- "avatar_url": actor.avatar,
- "first_name": actor.first_name,
- "last_name": actor.last_name,
- },
- "changes": changes,
- "issue_details": {
- "name": issue.name,
- "identifier": f"{issue.project.identifier}-{issue.sequence_id}",
- },
- "activity_time": str(formatted_time),
- }
- )
+ if changes:
+ template_data.append(
+ {
+ "actor_detail": {
+ "avatar_url": actor.avatar,
+ "first_name": actor.first_name,
+ "last_name": actor.last_name,
+ },
+ "changes": changes,
+ "issue_details": {
+ "name": issue.name,
+ "identifier": f"{issue.project.identifier}-{issue.sequence_id}",
+ },
+ "activity_time": str(formatted_time),
+ }
+ )
- summary = "Updates were made to the issue by"
+ summary = "Updates were made to the issue by"
- # Send the mail
- subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
- context = {
- "data": template_data,
- "summary": summary,
- "actors_involved": len(set(actors_involved)),
- "issue": {
- "issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}",
- "name": issue.name,
+ # Send the mail
+ subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
+ context = {
+ "data": template_data,
+ "summary": summary,
+ "actors_involved": len(set(actors_involved)),
+ "issue": {
+ "issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}",
+ "name": issue.name,
+ "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
+ },
+ "receiver": {
+ "email": receiver.email,
+ },
"issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
- },
- "receiver": {
- "email": receiver.email,
- },
- "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
- "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/",
- "workspace":str(issue.project.workspace.slug),
- "project": str(issue.project.name),
- "user_preference": f"{base_api}/profile/preferences/email",
- "comments": comments,
- }
- html_content = render_to_string(
- "emails/notifications/issue-updates.html", context
- )
- text_content = strip_tags(html_content)
-
- try:
- connection = get_connection(
- host=EMAIL_HOST,
- port=int(EMAIL_PORT),
- username=EMAIL_HOST_USER,
- password=EMAIL_HOST_PASSWORD,
- use_tls=EMAIL_USE_TLS == "1",
+ "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/",
+ "workspace":str(issue.project.workspace.slug),
+ "project": str(issue.project.name),
+ "user_preference": f"{base_api}/profile/preferences/email",
+ "comments": comments,
+ }
+ html_content = render_to_string(
+ "emails/notifications/issue-updates.html", context
)
+ text_content = strip_tags(html_content)
- msg = EmailMultiAlternatives(
- subject=subject,
- body=text_content,
- from_email=EMAIL_FROM,
- to=[receiver.email],
- connection=connection,
- )
- msg.attach_alternative(html_content, "text/html")
- msg.send()
+ try:
+ connection = get_connection(
+ host=EMAIL_HOST,
+ port=int(EMAIL_PORT),
+ username=EMAIL_HOST_USER,
+ password=EMAIL_HOST_PASSWORD,
+ use_tls=EMAIL_USE_TLS == "1",
+ )
- EmailNotificationLog.objects.filter(
- pk__in=email_notification_ids
- ).update(sent_at=timezone.now())
+ msg = EmailMultiAlternatives(
+ subject=subject,
+ body=text_content,
+ from_email=EMAIL_FROM,
+ to=[receiver.email],
+ connection=connection,
+ )
+ msg.attach_alternative(html_content, "text/html")
+ msg.send()
+
+ EmailNotificationLog.objects.filter(
+ pk__in=email_notification_ids
+ ).update(sent_at=timezone.now())
+
+ # release the lock
+ release_lock(lock_id=lock_id)
+ return
+ except Exception as e:
+ capture_exception(e)
+ # release the lock
+ release_lock(lock_id=lock_id)
+ return
+ else:
+            print("Duplicate task received. Skipping...")
return
- except Exception as e:
+ except (Issue.DoesNotExist, User.DoesNotExist) as e:
+ if settings.DEBUG:
print(e)
- return
- except Issue.DoesNotExist:
+ release_lock(lock_id=lock_id)
return
diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py
index b99e4b1d944..d8522e7697f 100644
--- a/apiserver/plane/bgtasks/export_task.py
+++ b/apiserver/plane/bgtasks/export_task.py
@@ -292,6 +292,7 @@ def issue_export_task(
workspace__id=workspace_id,
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
+ project__project_projectmember__is_active=True
)
.select_related(
"project", "workspace", "state", "parent", "created_by"
diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py
index 42152136358..7a1dc4fc6d2 100644
--- a/apiserver/plane/bgtasks/importer_task.py
+++ b/apiserver/plane/bgtasks/importer_task.py
@@ -60,15 +60,6 @@ def service_importer(service, importer_id):
batch_size=100,
)
- _ = [
- send_welcome_slack.delay(
- str(user.id),
- True,
- f"{user.email} was imported to Plane from {service}",
- )
- for user in new_users
- ]
-
workspace_users = User.objects.filter(
email__in=[
user.get("email").strip().lower()
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py
index b86ab5e783e..2a16ee911a8 100644
--- a/apiserver/plane/bgtasks/issue_activites_task.py
+++ b/apiserver/plane/bgtasks/issue_activites_task.py
@@ -483,17 +483,23 @@ def track_archive_at(
)
)
else:
+ if requested_data.get("automation"):
+ comment = "Plane has archived the issue"
+ new_value = "archive"
+ else:
+ comment = "Actor has archived the issue"
+ new_value = "manual_archive"
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
- comment="Plane has archived the issue",
+ comment=comment,
verb="updated",
actor_id=actor_id,
field="archived_at",
old_value=None,
- new_value="archive",
+ new_value=new_value,
epoch=epoch,
)
)
diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py
index 974a545fcdd..c6c4d75158c 100644
--- a/apiserver/plane/bgtasks/issue_automation_task.py
+++ b/apiserver/plane/bgtasks/issue_automation_task.py
@@ -79,7 +79,7 @@ def archive_old_issues():
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps(
- {"archived_at": str(archive_at)}
+ {"archived_at": str(archive_at), "automation": True}
),
actor_id=str(project.created_by_id),
issue_id=issue.id,
diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py
index f254a3cb734..0377ccb8be0 100644
--- a/apiserver/plane/db/models/user.py
+++ b/apiserver/plane/db/models/user.py
@@ -12,15 +12,9 @@
PermissionsMixin,
)
from django.db.models.signals import post_save
-from django.conf import settings
from django.dispatch import receiver
from django.utils import timezone
-# Third party imports
-from sentry_sdk import capture_exception
-from slack_sdk import WebClient
-from slack_sdk.errors import SlackApiError
-
def get_default_onboarding():
return {
@@ -144,25 +138,6 @@ def save(self, *args, **kwargs):
super(User, self).save(*args, **kwargs)
-@receiver(post_save, sender=User)
-def send_welcome_slack(sender, instance, created, **kwargs):
- try:
- if created and not instance.is_bot:
- # Send message on slack as well
- if settings.SLACK_BOT_TOKEN:
- client = WebClient(token=settings.SLACK_BOT_TOKEN)
- try:
- _ = client.chat_postMessage(
- channel="#trackers",
- text=f"New user {instance.email} has signed up and begun the onboarding journey.",
- )
- except SlackApiError as e:
- print(f"Got an error: {e.response['error']}")
- return
- except Exception as e:
- capture_exception(e)
- return
-
@receiver(post_save, sender=User)
def create_user_notification(sender, instance, created, **kwargs):
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index f032092504c..5c8947e73be 100644
--- a/apiserver/plane/settings/common.py
+++ b/apiserver/plane/settings/common.py
@@ -1,4 +1,5 @@
"""Global Settings"""
+
# Python imports
import os
import ssl
@@ -307,7 +308,9 @@
traces_sample_rate=1,
send_default_pii=True,
environment=os.environ.get("SENTRY_ENVIRONMENT", "development"),
- profiles_sample_rate=1.0,
+ profiles_sample_rate=float(
+ os.environ.get("SENTRY_PROFILE_SAMPLE_RATE", 0.5)
+ ),
)
diff --git a/apiserver/plane/utils/issue_search.py b/apiserver/plane/utils/issue_search.py
index d38b1f4c32a..3b6dea332ec 100644
--- a/apiserver/plane/utils/issue_search.py
+++ b/apiserver/plane/utils/issue_search.py
@@ -9,11 +9,11 @@
def search_issues(query, queryset):
- fields = ["name", "sequence_id"]
+ fields = ["name", "sequence_id", "project__identifier"]
q = Q()
for field in fields:
if field == "sequence_id" and len(query) <= 20:
- sequences = re.findall(r"[A-Za-z0-9]{1,12}-\d+", query)
+ sequences = re.findall(r"\b\d+\b", query)
for sequence_id in sequences:
q |= Q(**{"sequence_id": sequence_id})
else:
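The regex change above loosens how sequence IDs are extracted from a search query: instead of requiring the full `PROJ-123` form, any standalone number now matches, while the project prefix is handled by the newly added `project__identifier` field. A quick, self-contained comparison of the two patterns:

```python
import re

old_pattern = r"[A-Za-z0-9]{1,12}-\d+"  # matched only "PROJ-123"-style tokens
new_pattern = r"\b\d+\b"                # matches any standalone number

query = "WEB-42 login page 7"

print(re.findall(old_pattern, query))  # ['WEB-42']
print(re.findall(new_pattern, query))  # ['42', '7']
```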
diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt
index 194bf8d903a..eb0f542012b 100644
--- a/apiserver/requirements/base.txt
+++ b/apiserver/requirements/base.txt
@@ -30,7 +30,7 @@ openpyxl==3.1.2
beautifulsoup4==4.12.2
dj-database-url==2.1.0
posthog==3.0.2
-cryptography==42.0.0
+cryptography==42.0.4
lxml==4.9.3
boto3==1.28.40
diff --git a/apiserver/runtime.txt b/apiserver/runtime.txt
index d45f665dee8..424240cc059 100644
--- a/apiserver/runtime.txt
+++ b/apiserver/runtime.txt
@@ -1 +1 @@
-python-3.11.7
\ No newline at end of file
+python-3.11.8
\ No newline at end of file
diff --git a/deploy/1-click/README.md b/deploy/1-click/README.md
new file mode 100644
index 00000000000..88ea66c4c8e
--- /dev/null
+++ b/deploy/1-click/README.md
@@ -0,0 +1,78 @@
+# 1-Click Self-Hosting
+
+In this guide, we will walk you through setting up a 1-click self-hosted environment. Self-hosting gives you full control over your applications and data, along with better privacy and room for customization.
+
+Let's get started!
+
+## Installing Plane
+
+Installing Plane takes only a few minimal steps.
+
+### Prerequisites
+
+- Operating System (latest): Debian / Ubuntu / CentOS
+- Supported CPU Architecture: AMD64 / ARM64 / x86_64 / aarch64
+
+### Downloading Latest Stable Release
+
+```
+curl -fsSL https://raw.githubusercontent.com/makeplane/plane/master/deploy/1-click/install.sh | sh -
+
+```
+
+
+### Downloading Preview Release
+
+```
+export BRANCH=preview
+
+curl -fsSL https://raw.githubusercontent.com/makeplane/plane/preview/deploy/1-click/install.sh | sh -
+
+```
+
+NOTE: `Preview` builds do not support ARM64/AARCH64 CPU architecture
+
+
+---
+
+
+Expect this output after a successful install:
+
+![Install Output](images/install.png)
+
+Access the application in a browser at http://server-ip-address
+
+---
+
+### Get Control of your Plane Server Setup
+
+Plane App is available via the command `plane-app`. Run `plane-app --help` to see the commands available for managing Plane.
+
+![Plane Help](images/help.png)
+
+Basic Operations:
+1. Start Server using `plane-app start`
+1. Stop Server using `plane-app stop`
+1. Restart Server using `plane-app restart`
+
+Advanced Operations:
+1. Configure Plane using `plane-app --configure`. This lets you modify:
+ - NGINX Port (default 80)
+ - Domain Name (default is the local server public IP address)
+ - File Upload Size (default 5MB)
+ - External Postgres DB Url (optional - default empty)
+ - External Redis URL (optional - default empty)
+   - AWS S3 Bucket (optional - configure only if you want to use an S3 bucket)
+
+1. Upgrade Plane using `plane-app --upgrade`. This fetches the latest stable Plane files (docker-compose.yaml, .env) and docker images.
+
+1. Update the installer itself using `plane-app --update-installer`. This refreshes the `plane-app` utility.
+
+1. Uninstall Plane using `plane-app --uninstall`. This removes the Plane application and all Docker containers from the server but does not remove the data stored in Postgres, Redis, and Minio.
+
+1. Plane App can be reinstalled using `plane-app --install`.
+
+Application data is stored in the following folders:
+1. DB Data: /opt/plane/data/postgres
+1. Redis Data: /opt/plane/data/redis
+1. Minio Data: /opt/plane/data/minio
\ No newline at end of file
diff --git a/deploy/1-click/images/help.png b/deploy/1-click/images/help.png
new file mode 100644
index 00000000000..c14603a4b1e
Binary files /dev/null and b/deploy/1-click/images/help.png differ
diff --git a/deploy/1-click/images/install.png b/deploy/1-click/images/install.png
new file mode 100644
index 00000000000..c8ba1e5f829
Binary files /dev/null and b/deploy/1-click/images/install.png differ
diff --git a/deploy/1-click/install.sh b/deploy/1-click/install.sh
index 917d08fdf88..9a0eac90267 100644
--- a/deploy/1-click/install.sh
+++ b/deploy/1-click/install.sh
@@ -1,17 +1,20 @@
#!/bin/bash
+export GIT_REPO=makeplane/plane
+
# Check if the user has sudo access
if command -v curl &> /dev/null; then
sudo curl -sSL \
-o /usr/local/bin/plane-app \
- https://raw.githubusercontent.com/makeplane/plane/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
+ https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
else
sudo wget -q \
-O /usr/local/bin/plane-app \
- https://raw.githubusercontent.com/makeplane/plane/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
+ https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
fi
sudo chmod +x /usr/local/bin/plane-app
-sudo sed -i 's/export DEPLOY_BRANCH=${BRANCH:-master}/export DEPLOY_BRANCH='${BRANCH:-master}'/' /usr/local/bin/plane-app
+sudo sed -i 's@export DEPLOY_BRANCH=${BRANCH:-master}@export DEPLOY_BRANCH='${BRANCH:-master}'@' /usr/local/bin/plane-app
+sudo sed -i 's@CODE_REPO=${GIT_REPO:-makeplane/plane}@CODE_REPO='$GIT_REPO'@' /usr/local/bin/plane-app
-plane-app --help
+plane-app -i #--help
diff --git a/deploy/1-click/plane-app b/deploy/1-click/plane-app
index 2d6ef0a6f1b..e6bd24b9ec7 100644
--- a/deploy/1-click/plane-app
+++ b/deploy/1-click/plane-app
@@ -90,9 +90,9 @@ function prepare_environment() {
show_message "- Updating OS with required tools ✋" >&2
sudo "$PACKAGE_MANAGER" update -y
- sudo "$PACKAGE_MANAGER" upgrade -y
+ # sudo "$PACKAGE_MANAGER" upgrade -y
- local required_tools=("curl" "awk" "wget" "nano" "dialog" "git" "uidmap")
+ local required_tools=("curl" "awk" "wget" "nano" "dialog" "git" "uidmap" "jq")
for tool in "${required_tools[@]}"; do
if ! command -v $tool &> /dev/null; then
@@ -150,11 +150,11 @@ function download_plane() {
show_message "Downloading Plane Setup Files ✋" >&2
sudo curl -H 'Cache-Control: no-cache, no-store' \
-s -o $PLANE_INSTALL_DIR/docker-compose.yaml \
- https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/selfhost/docker-compose.yml?token=$(date +%s)
+ https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/docker-compose.yml?token=$(date +%s)
sudo curl -H 'Cache-Control: no-cache, no-store' \
-s -o $PLANE_INSTALL_DIR/variables-upgrade.env \
- https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/selfhost/variables.env?token=$(date +%s)
+ https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/variables.env?token=$(date +%s)
# if .env does not exists rename variables-upgrade.env to .env
if [ ! -f "$PLANE_INSTALL_DIR/.env" ]; then
@@ -202,7 +202,7 @@ function printUsageInstructions() {
}
function build_local_image() {
show_message "- Downloading Plane Source Code ✋" >&2
- REPO=https://github.com/makeplane/plane.git
+ REPO=https://github.com/$CODE_REPO.git
CURR_DIR=$PWD
PLANE_TEMP_CODE_DIR=$PLANE_INSTALL_DIR/temp
sudo rm -rf $PLANE_TEMP_CODE_DIR > /dev/null
@@ -290,40 +290,40 @@ function configure_plane() {
fi
- smtp_host=$(read_env "EMAIL_HOST")
- smtp_user=$(read_env "EMAIL_HOST_USER")
- smtp_password=$(read_env "EMAIL_HOST_PASSWORD")
- smtp_port=$(read_env "EMAIL_PORT")
- smtp_from=$(read_env "EMAIL_FROM")
- smtp_tls=$(read_env "EMAIL_USE_TLS")
- smtp_ssl=$(read_env "EMAIL_USE_SSL")
-
- SMTP_SETTINGS=$(dialog \
- --ok-label "Next" \
- --cancel-label "Skip" \
- --backtitle "Plane Configuration" \
- --title "SMTP Settings" \
- --form "" \
- 0 0 0 \
- "Host:" 1 1 "$smtp_host" 1 10 80 0 \
- "User:" 2 1 "$smtp_user" 2 10 80 0 \
- "Password:" 3 1 "$smtp_password" 3 10 80 0 \
- "Port:" 4 1 "${smtp_port:-587}" 4 10 5 0 \
- "From:" 5 1 "${smtp_from:-Mailer }" 5 10 80 0 \
- "TLS:" 6 1 "${smtp_tls:-1}" 6 10 1 1 \
- "SSL:" 7 1 "${smtp_ssl:-0}" 7 10 1 1 \
- 2>&1 1>&3)
-
- save_smtp_settings=0
- if [ $? -eq 0 ]; then
- save_smtp_settings=1
- smtp_host=$(echo "$SMTP_SETTINGS" | sed -n 1p)
- smtp_user=$(echo "$SMTP_SETTINGS" | sed -n 2p)
- smtp_password=$(echo "$SMTP_SETTINGS" | sed -n 3p)
- smtp_port=$(echo "$SMTP_SETTINGS" | sed -n 4p)
- smtp_from=$(echo "$SMTP_SETTINGS" | sed -n 5p)
- smtp_tls=$(echo "$SMTP_SETTINGS" | sed -n 6p)
- fi
+ # smtp_host=$(read_env "EMAIL_HOST")
+ # smtp_user=$(read_env "EMAIL_HOST_USER")
+ # smtp_password=$(read_env "EMAIL_HOST_PASSWORD")
+ # smtp_port=$(read_env "EMAIL_PORT")
+ # smtp_from=$(read_env "EMAIL_FROM")
+ # smtp_tls=$(read_env "EMAIL_USE_TLS")
+ # smtp_ssl=$(read_env "EMAIL_USE_SSL")
+
+ # SMTP_SETTINGS=$(dialog \
+ # --ok-label "Next" \
+ # --cancel-label "Skip" \
+ # --backtitle "Plane Configuration" \
+ # --title "SMTP Settings" \
+ # --form "" \
+ # 0 0 0 \
+ # "Host:" 1 1 "$smtp_host" 1 10 80 0 \
+ # "User:" 2 1 "$smtp_user" 2 10 80 0 \
+ # "Password:" 3 1 "$smtp_password" 3 10 80 0 \
+ # "Port:" 4 1 "${smtp_port:-587}" 4 10 5 0 \
+ # "From:" 5 1 "${smtp_from:-Mailer }" 5 10 80 0 \
+ # "TLS:" 6 1 "${smtp_tls:-1}" 6 10 1 1 \
+ # "SSL:" 7 1 "${smtp_ssl:-0}" 7 10 1 1 \
+ # 2>&1 1>&3)
+
+ # save_smtp_settings=0
+ # if [ $? -eq 0 ]; then
+ # save_smtp_settings=1
+ # smtp_host=$(echo "$SMTP_SETTINGS" | sed -n 1p)
+ # smtp_user=$(echo "$SMTP_SETTINGS" | sed -n 2p)
+ # smtp_password=$(echo "$SMTP_SETTINGS" | sed -n 3p)
+ # smtp_port=$(echo "$SMTP_SETTINGS" | sed -n 4p)
+ # smtp_from=$(echo "$SMTP_SETTINGS" | sed -n 5p)
+ # smtp_tls=$(echo "$SMTP_SETTINGS" | sed -n 6p)
+ # fi
external_pgdb_url=$(dialog \
--backtitle "Plane Configuration" \
--title "Using External Postgres Database ?" \
@@ -383,15 +383,6 @@ function configure_plane() {
domain_name: $domain_name
upload_limit: $upload_limit
- save_smtp_settings: $save_smtp_settings
- smtp_host: $smtp_host
- smtp_user: $smtp_user
- smtp_password: $smtp_password
- smtp_port: $smtp_port
- smtp_from: $smtp_from
- smtp_tls: $smtp_tls
- smtp_ssl: $smtp_ssl
-
save_aws_settings: $save_aws_settings
aws_region: $aws_region
aws_access_key: $aws_access_key
@@ -413,15 +404,15 @@ function configure_plane() {
fi
# check enable smpt settings value
- if [ $save_smtp_settings == 1 ]; then
- update_env "EMAIL_HOST" "$smtp_host"
- update_env "EMAIL_HOST_USER" "$smtp_user"
- update_env "EMAIL_HOST_PASSWORD" "$smtp_password"
- update_env "EMAIL_PORT" "$smtp_port"
- update_env "EMAIL_FROM" "$smtp_from"
- update_env "EMAIL_USE_TLS" "$smtp_tls"
- update_env "EMAIL_USE_SSL" "$smtp_ssl"
- fi
+ # if [ $save_smtp_settings == 1 ]; then
+ # update_env "EMAIL_HOST" "$smtp_host"
+ # update_env "EMAIL_HOST_USER" "$smtp_user"
+ # update_env "EMAIL_HOST_PASSWORD" "$smtp_password"
+ # update_env "EMAIL_PORT" "$smtp_port"
+ # update_env "EMAIL_FROM" "$smtp_from"
+ # update_env "EMAIL_USE_TLS" "$smtp_tls"
+ # update_env "EMAIL_USE_SSL" "$smtp_ssl"
+ # fi
# check enable aws settings value
if [[ $save_aws_settings == 1 && $aws_access_key != "" && $aws_secret_key != "" ]] ; then
@@ -493,13 +484,24 @@ function install() {
check_for_docker_images
last_installed_on=$(read_config "INSTALLATION_DATE")
- if [ "$last_installed_on" == "" ]; then
- configure_plane
- fi
- printUsageInstructions
+ # if [ "$last_installed_on" == "" ]; then
+ # configure_plane
+ # fi
+
+ update_env "NGINX_PORT" "80"
+ update_env "DOMAIN_NAME" "$MY_IP"
+ update_env "WEB_URL" "http://$MY_IP"
+ update_env "CORS_ALLOWED_ORIGINS" "http://$MY_IP"
- update_config "INSTALLATION_DATE" "$(date)"
+ update_config "INSTALLATION_DATE" "$(date '+%Y-%m-%d')"
+ if command -v crontab &> /dev/null; then
+ sudo touch /etc/cron.daily/makeplane
+ sudo chmod +x /etc/cron.daily/makeplane
+ sudo echo "0 2 * * * root /usr/local/bin/plane-app --upgrade" > /etc/cron.daily/makeplane
+ sudo crontab /etc/cron.daily/makeplane
+ fi
+
show_message "Plane Installed Successfully ✅"
show_message ""
else
@@ -539,12 +541,15 @@ function upgrade() {
prepare_environment
if [ $? -eq 0 ]; then
+ stop_server
download_plane
if [ $? -eq 0 ]; then
check_for_docker_images
upgrade_configuration
update_config "UPGRADE_DATE" "$(date)"
-
+
+ start_server
+
show_message ""
show_message "Plane Upgraded Successfully ✅"
show_message ""
@@ -601,6 +606,11 @@ function uninstall() {
sudo rm $PLANE_INSTALL_DIR/variables-upgrade.env &> /dev/null
sudo rm $PLANE_INSTALL_DIR/config.env &> /dev/null
sudo rm $PLANE_INSTALL_DIR/docker-compose.yaml &> /dev/null
+
+ if command -v crontab &> /dev/null; then
+ sudo crontab -r &> /dev/null
+ sudo rm /etc/cron.daily/makeplane &> /dev/null
+ fi
# rm -rf $PLANE_INSTALL_DIR &> /dev/null
show_message "- Configuration Cleaned ✅"
@@ -642,7 +652,39 @@ function start_server() {
while ! sudo docker compose -f "$docker_compose_file" --env-file="$env_file" ps --services --filter "status=running" --quiet | grep -q "."; do
sleep 1
done
+ # wait for migrator container to exit with status 0 before starting the application
+ migrator_container_id=$(sudo docker container ls -aq -f "name=plane-migrator")
+
+ # if migrator container is running, wait for it to exit
+ if [ -n "$migrator_container_id" ]; then
+ while sudo docker inspect --format='{{.State.Status}}' $migrator_container_id | grep -q "running"; do
+ show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (Migrator in progress)" "replace_last_line" >&2
+ sleep 1
+ done
+ fi
+
+ # if migrator exit status is not 0, show error message and exit
+ if [ -n "$migrator_container_id" ]; then
+ migrator_exit_code=$(sudo docker inspect --format='{{.State.ExitCode}}' $migrator_container_id)
+ if [ $migrator_exit_code -ne 0 ]; then
+ # show_message "Migrator failed with exit code $migrator_exit_code ❌" "replace_last_line" >&2
+ show_message "Plane Server failed to start ❌" "replace_last_line" >&2
+ stop_server
+ exit 1
+ fi
+ fi
+
+ api_container_id=$(sudo docker container ls -q -f "name=plane-api")
+ while ! sudo docker logs $api_container_id 2>&1 | grep -i "Application startup complete";
+ do
+ show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (API starting)" "replace_last_line" >&2
+ sleep 1
+ done
show_message "Plane Server Started ($APP_RELEASE) ✅" "replace_last_line" >&2
+ show_message "---------------------------------------------------------------" >&2
+ show_message "Access the Plane application at http://$MY_IP" >&2
+ show_message "---------------------------------------------------------------" >&2
+
else
show_message "Plane Server not installed. Please install Plane first ❌" "replace_last_line" >&2
fi
@@ -694,7 +736,7 @@ function update_installer() {
show_message "Updating Plane Installer ✋" >&2
sudo curl -H 'Cache-Control: no-cache, no-store' \
-s -o /usr/local/bin/plane-app \
- https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/1-click/plane-app?token=$(date +%s)
+ https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/1-click/plane-app?token=$(date +%s)
sudo chmod +x /usr/local/bin/plane-app > /dev/null&> /dev/null
show_message "Plane Installer Updated ✅" "replace_last_line" >&2
@@ -711,12 +753,14 @@ fi
PLANE_INSTALL_DIR=/opt/plane
DATA_DIR=$PLANE_INSTALL_DIR/data
-LOG_DIR=$PLANE_INSTALL_DIR/log
+LOG_DIR=$PLANE_INSTALL_DIR/logs
+CODE_REPO=${GIT_REPO:-makeplane/plane}
OS_SUPPORTED=false
CPU_ARCH=$(uname -m)
PROGRESS_MSG=""
USE_GLOBAL_IMAGES=0
PACKAGE_MANAGER=""
+MY_IP=$(curl -s ifconfig.me)
if [[ $CPU_ARCH == "amd64" || $CPU_ARCH == "x86_64" || ( $DEPLOY_BRANCH == "master" && ( $CPU_ARCH == "arm64" || $CPU_ARCH == "aarch64" ) ) ]]; then
USE_GLOBAL_IMAGES=1
@@ -740,6 +784,9 @@ elif [ "$1" == "restart" ]; then
restart_server
elif [ "$1" == "--install" ] || [ "$1" == "-i" ]; then
install
+ start_server
+ show_message "" >&2
+ show_message "To view help, use plane-app --help " >&2
elif [ "$1" == "--configure" ] || [ "$1" == "-c" ]; then
configure_plane
printUsageInstructions
diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml
index 60861878cb5..07e5ea9f623 100644
--- a/deploy/selfhost/docker-compose.yml
+++ b/deploy/selfhost/docker-compose.yml
@@ -56,8 +56,6 @@ x-app-env : &app-env
- BUCKET_NAME=${BUCKET_NAME:-uploads}
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
-
-
services:
web:
<<: *app-env
@@ -138,7 +136,6 @@ services:
command: postgres -c 'max_connections=1000'
volumes:
- pgdata:/var/lib/postgresql/data
-
plane-redis:
<<: *app-env
image: redis:6.2.7-alpine
diff --git a/deploy/selfhost/install.sh b/deploy/selfhost/install.sh
index 30f2d15d72c..16b6ea7c39c 100755
--- a/deploy/selfhost/install.sh
+++ b/deploy/selfhost/install.sh
@@ -13,6 +13,23 @@ YELLOW='\033[1;33m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color
+function print_header() {
+clear
+
+cat <<"EOF"
+---------------------------------------
+ ____ _
+| _ \| | __ _ _ __ ___
+| |_) | |/ _` | '_ \ / _ \
+| __/| | (_| | | | | __/
+|_| |_|\__,_|_| |_|\___|
+
+---------------------------------------
+Project management tool from the future
+---------------------------------------
+EOF
+}
+
function buildLocalImage() {
if [ "$1" == "--force-build" ]; then
DO_BUILD="1"
@@ -110,7 +127,7 @@ function download() {
exit 0
fi
else
- docker compose -f $PLANE_INSTALL_DIR/docker-compose.yaml pull
+ docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH pull
fi
echo ""
@@ -121,19 +138,48 @@ function download() {
}
function startServices() {
- cd $PLANE_INSTALL_DIR
- docker compose up -d --quiet-pull
- cd $SCRIPT_DIR
+ docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH up -d --quiet-pull
+
+ local migrator_container_id=$(docker container ls -aq -f "name=plane-app-migrator")
+ if [ -n "$migrator_container_id" ]; then
+ local idx=0
+ while docker inspect --format='{{.State.Status}}' $migrator_container_id | grep -q "running"; do
+ local message=">>> Waiting for Data Migration to finish"
+ local dots=$(printf '%*s' $idx | tr ' ' '.')
+ echo -ne "\r$message$dots"
+ ((idx++))
+ sleep 1
+ done
+ fi
+ printf "\r\033[K"
+
+ # if migrator exit status is not 0, show error message and exit
+ if [ -n "$migrator_container_id" ]; then
+ local migrator_exit_code=$(docker inspect --format='{{.State.ExitCode}}' $migrator_container_id)
+ if [ $migrator_exit_code -ne 0 ]; then
+ echo "Plane Server failed to start ❌"
+ stopServices
+ exit 1
+ fi
+ fi
+
+ local api_container_id=$(docker container ls -q -f "name=plane-app-api")
+ local idx2=0
+ while ! docker logs $api_container_id 2>&1 | grep -m 1 -i "Application startup complete" | grep -q ".";
+ do
+ local message=">>> Waiting for API Service to Start"
+ local dots=$(printf '%*s' $idx2 | tr ' ' '.')
+ echo -ne "\r$message$dots"
+ ((idx2++))
+ sleep 1
+ done
+ printf "\r\033[K"
}
function stopServices() {
- cd $PLANE_INSTALL_DIR
- docker compose down
- cd $SCRIPT_DIR
+ docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH down
}
function restartServices() {
- cd $PLANE_INSTALL_DIR
- docker compose restart
- cd $SCRIPT_DIR
+ docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH restart
}
function upgrade() {
echo "***** STOPPING SERVICES ****"
@@ -144,47 +190,137 @@ function upgrade() {
download
echo "***** PLEASE VALIDATE AND START SERVICES ****"
+}
+function viewSpecificLogs(){
+ local SERVICE_NAME=$1
+ if docker-compose -f $DOCKER_FILE_PATH ps | grep -q "$SERVICE_NAME"; then
+ echo "Service '$SERVICE_NAME' is running."
+ else
+ echo "Service '$SERVICE_NAME' is not running."
+ fi
+
+ docker compose -f $DOCKER_FILE_PATH logs -f $SERVICE_NAME
+}
+function viewLogs(){
+
+ ARG_SERVICE_NAME=$2
+
+ if [ -z "$ARG_SERVICE_NAME" ];
+ then
+ echo
+ echo "Select a Service you want to view the logs for:"
+ echo " 1) Web"
+ echo " 2) Space"
+ echo " 3) API"
+ echo " 4) Worker"
+ echo " 5) Beat-Worker"
+ echo " 6) Migrator"
+ echo " 7) Proxy"
+ echo " 8) Redis"
+ echo " 9) Postgres"
+ echo " 10) Minio"
+ echo " 0) Back to Main Menu"
+ echo
+ read -p "Service: " DOCKER_SERVICE_NAME
+
+ until (( DOCKER_SERVICE_NAME >= 0 && DOCKER_SERVICE_NAME <= 10 )); do
+            echo "Invalid selection. Please enter a number between 0 and 10."
+ read -p "Service: " DOCKER_SERVICE_NAME
+ done
+
+ if [ -z "$DOCKER_SERVICE_NAME" ];
+ then
+ echo "INVALID SERVICE NAME SUPPLIED"
+ else
+ case $DOCKER_SERVICE_NAME in
+ 1) viewSpecificLogs "web";;
+ 2) viewSpecificLogs "space";;
+ 3) viewSpecificLogs "api";;
+ 4) viewSpecificLogs "worker";;
+ 5) viewSpecificLogs "beat-worker";;
+ 6) viewSpecificLogs "migrator";;
+ 7) viewSpecificLogs "proxy";;
+ 8) viewSpecificLogs "plane-redis";;
+ 9) viewSpecificLogs "plane-db";;
+ 10) viewSpecificLogs "plane-minio";;
+ 0) askForAction;;
+ *) echo "INVALID SERVICE NAME SUPPLIED";;
+ esac
+ fi
+ elif [ -n "$ARG_SERVICE_NAME" ];
+ then
+ ARG_SERVICE_NAME=$(echo "$ARG_SERVICE_NAME" | tr '[:upper:]' '[:lower:]')
+ case $ARG_SERVICE_NAME in
+ web) viewSpecificLogs "web";;
+ space) viewSpecificLogs "space";;
+ api) viewSpecificLogs "api";;
+ worker) viewSpecificLogs "worker";;
+ beat-worker) viewSpecificLogs "beat-worker";;
+ migrator) viewSpecificLogs "migrator";;
+ proxy) viewSpecificLogs "proxy";;
+ redis) viewSpecificLogs "plane-redis";;
+ postgres) viewSpecificLogs "plane-db";;
+ minio) viewSpecificLogs "plane-minio";;
+ *) echo "INVALID SERVICE NAME SUPPLIED";;
+ esac
+ else
+ echo "INVALID SERVICE NAME SUPPLIED"
+ fi
}
function askForAction() {
- echo
- echo "Select a Action you want to perform:"
- echo " 1) Install (${CPU_ARCH})"
- echo " 2) Start"
- echo " 3) Stop"
- echo " 4) Restart"
- echo " 5) Upgrade"
- echo " 6) Exit"
- echo
- read -p "Action [2]: " ACTION
- until [[ -z "$ACTION" || "$ACTION" =~ ^[1-6]$ ]]; do
- echo "$ACTION: invalid selection."
+ local DEFAULT_ACTION=$1
+
+ if [ -z "$DEFAULT_ACTION" ];
+ then
+ echo
+        echo "Select an Action you want to perform:"
+ echo " 1) Install (${CPU_ARCH})"
+ echo " 2) Start"
+ echo " 3) Stop"
+ echo " 4) Restart"
+ echo " 5) Upgrade"
+ echo " 6) View Logs"
+ echo " 7) Exit"
+ echo
read -p "Action [2]: " ACTION
- done
- echo
+ until [[ -z "$ACTION" || "$ACTION" =~ ^[1-7]$ ]]; do
+ echo "$ACTION: invalid selection."
+ read -p "Action [2]: " ACTION
+ done
+ if [ -z "$ACTION" ];
+ then
+ ACTION=2
+ fi
+ echo
+ fi
- if [ "$ACTION" == "1" ]
+ if [ "$ACTION" == "1" ] || [ "$DEFAULT_ACTION" == "install" ]
then
install
askForAction
- elif [ "$ACTION" == "2" ] || [ "$ACTION" == "" ]
+ elif [ "$ACTION" == "2" ] || [ "$DEFAULT_ACTION" == "start" ]
then
startServices
askForAction
- elif [ "$ACTION" == "3" ]
+ elif [ "$ACTION" == "3" ] || [ "$DEFAULT_ACTION" == "stop" ]
then
stopServices
askForAction
- elif [ "$ACTION" == "4" ]
+ elif [ "$ACTION" == "4" ] || [ "$DEFAULT_ACTION" == "restart" ]
then
restartServices
askForAction
- elif [ "$ACTION" == "5" ]
+ elif [ "$ACTION" == "5" ] || [ "$DEFAULT_ACTION" == "upgrade" ]
then
upgrade
askForAction
- elif [ "$ACTION" == "6" ]
+ elif [ "$ACTION" == "6" ] || [ "$DEFAULT_ACTION" == "logs" ]
+ then
+ viewLogs $@
+ askForAction
+ elif [ "$ACTION" == "7" ]
then
exit 0
else
@@ -217,4 +353,8 @@ then
fi
mkdir -p $PLANE_INSTALL_DIR/archive
-askForAction
+DOCKER_FILE_PATH=$PLANE_INSTALL_DIR/docker-compose.yaml
+DOCKER_ENV_PATH=$PLANE_INSTALL_DIR/.env
+
+print_header
+askForAction $@
diff --git a/package.json b/package.json
index 762ce322a0d..9239a9b41fa 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"repository": "https://github.com/makeplane/plane.git",
- "version": "0.15.1",
+ "version": "0.16.0",
"license": "AGPL-3.0",
"private": true,
"workspaces": [
diff --git a/packages/editor/core/package.json b/packages/editor/core/package.json
index 7f7f4831a03..fcb6b57bbb2 100644
--- a/packages/editor/core/package.json
+++ b/packages/editor/core/package.json
@@ -1,6 +1,6 @@
{
"name": "@plane/editor-core",
- "version": "0.15.1",
+ "version": "0.16.0",
"description": "Core Editor that powers Plane",
"private": true,
"main": "./dist/index.mjs",
diff --git a/packages/editor/core/src/lib/editor-commands.ts b/packages/editor/core/src/lib/editor-commands.ts
index 4a56f07c2dc..6524d1ff58a 100644
--- a/packages/editor/core/src/lib/editor-commands.ts
+++ b/packages/editor/core/src/lib/editor-commands.ts
@@ -97,8 +97,8 @@ export const insertTableCommand = (editor: Editor, range?: Range) => {
}
}
}
- if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();
- else editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();
+ if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3 }).run();
+ else editor.chain().focus().insertTable({ rows: 3, cols: 3 }).run();
};
export const unsetLinkEditor = (editor: Editor) => {
diff --git a/packages/editor/core/src/styles/editor.css b/packages/editor/core/src/styles/editor.css
index b0d2a10213a..dbbea671eba 100644
--- a/packages/editor/core/src/styles/editor.css
+++ b/packages/editor/core/src/styles/editor.css
@@ -170,68 +170,6 @@ ul[data-type="taskList"] li[data-checked="true"] > div > p {
}
}
-#editor-container {
- table {
- border-collapse: collapse;
- table-layout: fixed;
- margin: 0.5em 0 0.5em 0;
-
- border: 1px solid rgb(var(--color-border-200));
- width: 100%;
-
- td,
- th {
- min-width: 1em;
- border: 1px solid rgb(var(--color-border-200));
- padding: 10px 15px;
- vertical-align: top;
- box-sizing: border-box;
- position: relative;
- transition: background-color 0.3s ease;
-
- > * {
- margin-bottom: 0;
- }
- }
-
- th {
- font-weight: bold;
- text-align: left;
- background-color: rgb(var(--color-primary-100));
- }
-
- td:hover {
- background-color: rgba(var(--color-primary-300), 0.1);
- }
-
- .selectedCell:after {
- z-index: 2;
- position: absolute;
- content: "";
- left: 0;
- right: 0;
- top: 0;
- bottom: 0;
- background-color: rgba(var(--color-primary-300), 0.1);
- pointer-events: none;
- }
-
- .column-resize-handle {
- position: absolute;
- right: -2px;
- top: 0;
- bottom: -2px;
- width: 2px;
- background-color: rgb(var(--color-primary-400));
- pointer-events: none;
- }
- }
-}
-
-.tableWrapper {
- overflow-x: auto;
-}
-
.resize-cursor {
cursor: ew-resize;
cursor: col-resize;
diff --git a/packages/editor/core/src/styles/table.css b/packages/editor/core/src/styles/table.css
index 8a47a8c59fd..ca384d34fc6 100644
--- a/packages/editor/core/src/styles/table.css
+++ b/packages/editor/core/src/styles/table.css
@@ -9,15 +9,15 @@
border-collapse: collapse;
table-layout: fixed;
margin: 0;
- margin-bottom: 3rem;
- border: 1px solid rgba(var(--color-border-200));
+ margin-bottom: 1rem;
+ border: 2px solid rgba(var(--color-border-300));
width: 100%;
}
.tableWrapper table td,
.tableWrapper table th {
min-width: 1em;
- border: 1px solid rgba(var(--color-border-200));
+ border: 1px solid rgba(var(--color-border-300));
padding: 10px 15px;
vertical-align: top;
box-sizing: border-box;
@@ -43,7 +43,8 @@
.tableWrapper table th {
font-weight: bold;
text-align: left;
- background-color: rgba(var(--color-primary-100));
+ background-color: #d9e4ff;
+ color: #171717;
}
.tableWrapper table th * {
@@ -62,6 +63,35 @@
pointer-events: none;
}
+.colorPicker {
+ display: grid;
+ padding: 8px 8px;
+ grid-template-columns: repeat(6, 1fr);
+ gap: 5px;
+}
+
+.colorPickerLabel {
+ font-size: 0.85rem;
+ color: #6b7280;
+ padding: 8px 8px;
+ padding-bottom: 0px;
+}
+
+.colorPickerItem {
+ margin: 2px 0px;
+ width: 24px;
+ height: 24px;
+ border-radius: 4px;
+ border: none;
+ cursor: pointer;
+}
+
+.divider {
+ background-color: #e5e7eb;
+ height: 1px;
+ margin: 3px 0;
+}
+
.tableWrapper table .column-resize-handle {
position: absolute;
right: -2px;
@@ -69,7 +99,7 @@
bottom: -2px;
width: 4px;
z-index: 99;
- background-color: rgba(var(--color-primary-400));
+ background-color: #d9e4ff;
pointer-events: none;
}
@@ -112,7 +142,7 @@
}
.tableWrapper .tableControls .rowsControlDiv {
- background-color: rgba(var(--color-primary-100));
+ background-color: #d9e4ff;
border: 1px solid rgba(var(--color-border-200));
border-radius: 2px;
background-size: 1.25rem;
@@ -127,7 +157,7 @@
}
.tableWrapper .tableControls .columnsControlDiv {
- background-color: rgba(var(--color-primary-100));
+ background-color: #d9e4ff;
border: 1px solid rgba(var(--color-border-200));
border-radius: 2px;
background-size: 1.25rem;
@@ -144,10 +174,12 @@
.tableWrapper .tableControls .tableColorPickerToolbox {
border: 1px solid rgba(var(--color-border-300));
background-color: rgba(var(--color-background-100));
+ border-radius: 5px;
+ box-shadow: 0px 2px 4px rgba(0, 0, 0, 0.1);
padding: 0.25rem;
display: flex;
flex-direction: column;
- width: 200px;
+ width: max-content;
gap: 0.25rem;
}
@@ -158,7 +190,7 @@
align-items: center;
gap: 0.5rem;
border: none;
- padding: 0.1rem;
+ padding: 0.3rem 0.5rem 0.1rem 0.1rem;
border-radius: 4px;
cursor: pointer;
transition: all 0.2s;
@@ -173,9 +205,7 @@
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .iconContainer,
.tableWrapper .tableControls .tableToolbox .toolboxItem .colorContainer,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .colorContainer {
- border: 1px solid rgba(var(--color-border-300));
- border-radius: 3px;
- padding: 4px;
+ padding: 4px 0px;
display: flex;
align-items: center;
justify-content: center;
@@ -187,8 +217,8 @@
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .iconContainer svg,
.tableWrapper .tableControls .tableToolbox .toolboxItem .colorContainer svg,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .colorContainer svg {
- width: 2rem;
- height: 2rem;
+ width: 1rem;
+ height: 1rem;
}
.tableToolbox {
diff --git a/packages/editor/core/src/ui/extensions/index.tsx b/packages/editor/core/src/ui/extensions/index.tsx
index 5bfba3b0f55..190731fe0b6 100644
--- a/packages/editor/core/src/ui/extensions/index.tsx
+++ b/packages/editor/core/src/ui/extensions/index.tsx
@@ -25,7 +25,8 @@ import { DeleteImage } from "src/types/delete-image";
import { IMentionSuggestion } from "src/types/mention-suggestion";
import { RestoreImage } from "src/types/restore-image";
import { CustomLinkExtension } from "src/ui/extensions/custom-link";
-import { CustomCodeInlineExtension } from "./code-inline";
+import { CustomCodeInlineExtension } from "src/ui/extensions/code-inline";
+import { CustomTypographyExtension } from "src/ui/extensions/typography";
export const CoreEditorExtensions = (
mentionConfig: {
@@ -79,6 +80,7 @@ export const CoreEditorExtensions = (
"text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer",
},
}),
+ CustomTypographyExtension,
ImageExtension(deleteFile, restoreFile, cancelUploadImage).configure({
HTMLAttributes: {
class: "rounded-lg border border-custom-border-300",
diff --git a/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts b/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts
index aedb59411a7..403bd3f02c7 100644
--- a/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts
+++ b/packages/editor/core/src/ui/extensions/table/table-cell/table-cell.ts
@@ -13,7 +13,7 @@ export const TableCell = Node.create({
};
},
- content: "paragraph+",
+ content: "block+",
addAttributes() {
return {
@@ -33,7 +33,10 @@ export const TableCell = Node.create({
},
},
background: {
- default: "none",
+ default: null,
+ },
+ textColor: {
+ default: null,
},
};
},
@@ -50,7 +53,7 @@ export const TableCell = Node.create({
return [
"td",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, {
- style: `background-color: ${node.attrs.background}`,
+ style: `background-color: ${node.attrs.background}; color: ${node.attrs.textColor}`,
}),
0,
];
diff --git a/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts b/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts
index c0decdbf803..bd994f467d5 100644
--- a/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts
+++ b/packages/editor/core/src/ui/extensions/table/table-header/table-header.ts
@@ -33,7 +33,7 @@ export const TableHeader = Node.create({
},
},
background: {
- default: "rgb(var(--color-primary-100))",
+ default: "none",
},
};
},
diff --git a/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts b/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts
index 28c9a9a48e8..f961c058246 100644
--- a/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts
+++ b/packages/editor/core/src/ui/extensions/table/table-row/table-row.ts
@@ -13,6 +13,17 @@ export const TableRow = Node.create({
};
},
+ addAttributes() {
+ return {
+ background: {
+ default: null,
+ },
+ textColor: {
+ default: null,
+ },
+ };
+ },
+
content: "(tableCell | tableHeader)*",
tableRole: "row",
@@ -22,6 +33,12 @@ export const TableRow = Node.create({
},
renderHTML({ HTMLAttributes }) {
- return ["tr", mergeAttributes(this.options.HTMLAttributes, HTMLAttributes), 0];
+ const style = HTMLAttributes.background
+ ? `background-color: ${HTMLAttributes.background}; color: ${HTMLAttributes.textColor}`
+ : "";
+
+ const attributes = mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, { style });
+
+ return ["tr", attributes, 0];
},
});
diff --git a/packages/editor/core/src/ui/extensions/table/table/icons.ts b/packages/editor/core/src/ui/extensions/table/table/icons.ts
index c08710ec328..f73c55c09f4 100644
--- a/packages/editor/core/src/ui/extensions/table/table/icons.ts
+++ b/packages/editor/core/src/ui/extensions/table/table/icons.ts
@@ -1,7 +1,7 @@
export const icons = {
colorPicker: ` `,
- deleteColumn: ` `,
- deleteRow: ` `,
+ deleteColumn: ` `,
+ deleteRow: ` `,
insertLeftTableIcon: `
`,
+ toggleColumnHeader: ` `,
+ toggleRowHeader: ` `,
insertBottomTableIcon: ` = {
placement: "right",
};
-function setCellsBackgroundColor(editor: Editor, backgroundColor: string) {
+function setCellsBackgroundColor(editor: Editor, color: { backgroundColor: string; textColor: string }) {
return editor
.chain()
.focus()
.updateAttributes("tableCell", {
- background: backgroundColor,
- })
- .updateAttributes("tableHeader", {
- background: backgroundColor,
+ background: color.backgroundColor,
+ textColor: color.textColor,
})
.run();
}
+function setTableRowBackgroundColor(editor: Editor, color: { backgroundColor: string; textColor: string }) {
+ const { state, dispatch } = editor.view;
+ const { selection } = state;
+ if (!(selection instanceof CellSelection)) {
+ return false;
+ }
+
+ // Get the position of the hovered cell in the selection to determine the row.
+ const hoveredCell = selection.$headCell || selection.$anchorCell;
+
+ // Find the depth of the table row node
+ let rowDepth = hoveredCell.depth;
+ while (rowDepth > 0 && hoveredCell.node(rowDepth).type.name !== "tableRow") {
+ rowDepth--;
+ }
+
+ // If we couldn't find a tableRow node, we can't set the background color
+ if (hoveredCell.node(rowDepth).type.name !== "tableRow") {
+ return false;
+ }
+
+ // Get the position where the table row starts
+ const rowStartPos = hoveredCell.start(rowDepth);
+
+ // Create a transaction that sets the background color on the tableRow node.
+ const tr = state.tr.setNodeMarkup(rowStartPos - 1, null, {
+ ...hoveredCell.node(rowDepth).attrs,
+ background: color.backgroundColor,
+ textColor: color.textColor,
+ });
+
+ dispatch(tr);
+ return true;
+}
+
const columnsToolboxItems: ToolboxItem[] = [
{
- label: "Add Column Before",
+ label: "Toggle column header",
+ icon: icons.toggleColumnHeader,
+ action: ({ editor }: { editor: Editor }) => editor.chain().focus().toggleHeaderColumn().run(),
+ },
+ {
+ label: "Add column before",
icon: icons.insertLeftTableIcon,
action: ({ editor }: { editor: Editor }) => editor.chain().focus().addColumnBefore().run(),
},
{
- label: "Add Column After",
+ label: "Add column after",
icon: icons.insertRightTableIcon,
action: ({ editor }: { editor: Editor }) => editor.chain().focus().addColumnAfter().run(),
},
{
- label: "Pick Column Color",
- icon: icons.colorPicker,
- action: ({
- editor,
- triggerButton,
- controlsContainer,
- }: {
- editor: Editor;
- triggerButton: HTMLElement;
- controlsContainer: Element;
- }) => {
- createColorPickerToolbox({
- triggerButton,
- tippyOptions: {
- appendTo: controlsContainer,
- },
- onSelectColor: (color) => setCellsBackgroundColor(editor, color),
- });
- },
+ label: "Pick color",
+ icon: "", // No icon needed for color picker
+ action: (args: any) => {}, // Placeholder action; actual color picking is handled in `createToolbox`
},
{
- label: "Delete Column",
+ label: "Delete column",
icon: icons.deleteColumn,
action: ({ editor }: { editor: Editor }) => editor.chain().focus().deleteColumn().run(),
},
@@ -135,35 +157,24 @@ const columnsToolboxItems: ToolboxItem[] = [
const rowsToolboxItems: ToolboxItem[] = [
{
- label: "Add Row Above",
+ label: "Toggle row header",
+ icon: icons.toggleRowHeader,
+ action: ({ editor }: { editor: Editor }) => editor.chain().focus().toggleHeaderRow().run(),
+ },
+ {
+ label: "Add row above",
icon: icons.insertTopTableIcon,
action: ({ editor }: { editor: Editor }) => editor.chain().focus().addRowBefore().run(),
},
{
- label: "Add Row Below",
+ label: "Add row below",
icon: icons.insertBottomTableIcon,
action: ({ editor }: { editor: Editor }) => editor.chain().focus().addRowAfter().run(),
},
{
- label: "Pick Row Color",
- icon: icons.colorPicker,
- action: ({
- editor,
- triggerButton,
- controlsContainer,
- }: {
- editor: Editor;
- triggerButton: HTMLButtonElement;
- controlsContainer: Element | "parent" | ((ref: Element) => Element) | undefined;
- }) => {
- createColorPickerToolbox({
- triggerButton,
- tippyOptions: {
- appendTo: controlsContainer,
- },
- onSelectColor: (color) => setCellsBackgroundColor(editor, color),
- });
- },
+ label: "Pick color",
+ icon: "",
+ action: (args: any) => {}, // Placeholder action; actual color picking is handled in `createToolbox`
},
{
label: "Delete Row",
@@ -176,107 +187,62 @@ function createToolbox({
triggerButton,
items,
tippyOptions,
+ onSelectColor,
onClickItem,
+ colors,
}: {
triggerButton: Element | null;
items: ToolboxItem[];
tippyOptions: any;
onClickItem: (item: ToolboxItem) => void;
+ onSelectColor: (color: { backgroundColor: string; textColor: string }) => void;
+ colors: { [key: string]: { backgroundColor: string; textColor: string; icon?: string } };
}): Instance {
// @ts-expect-error
const toolbox = tippy(triggerButton, {
content: h(
"div",
{ className: "tableToolbox" },
- items.map((item) =>
- h(
- "div",
- {
- className: "toolboxItem",
- itemType: "button",
- onClick() {
- onClickItem(item);
- },
- },
- [
- h("div", {
- className: "iconContainer",
- innerHTML: item.icon,
- }),
- h("div", { className: "label" }, item.label),
- ]
- )
- )
- ),
- ...tippyOptions,
- });
-
- return Array.isArray(toolbox) ? toolbox[0] : toolbox;
-}
-
-function createColorPickerToolbox({
- triggerButton,
- tippyOptions,
- onSelectColor = () => {},
-}: {
- triggerButton: HTMLElement;
- tippyOptions: Partial;
- onSelectColor?: (color: string) => void;
-}) {
- const items = {
- Default: "rgb(var(--color-primary-100))",
- Orange: "#FFE5D1",
- Grey: "#F1F1F1",
- Yellow: "#FEF3C7",
- Green: "#DCFCE7",
- Red: "#FFDDDD",
- Blue: "#D9E4FF",
- Pink: "#FFE8FA",
- Purple: "#E8DAFB",
- };
-
- const colorPicker = tippy(triggerButton, {
- ...defaultTippyOptions,
- content: h(
- "div",
- { className: "tableColorPickerToolbox" },
- Object.entries(items).map(([key, value]) =>
- h(
- "div",
- {
- className: "toolboxItem",
- itemType: "button",
- onClick: () => {
- onSelectColor(value);
- colorPicker.hide();
- },
- },
- [
- h("div", {
- className: "colorContainer",
- style: {
- backgroundColor: value,
- },
- }),
+ items.map((item, index) => {
+ if (item.label === "Pick color") {
+ return h("div", { className: "flex flex-col" }, [
+ h("div", { className: "divider" }),
+ h("div", { className: "colorPickerLabel" }, item.label),
h(
"div",
- {
- className: "label",
- },
- key
+ { className: "colorPicker grid" },
+ Object.entries(colors).map(([colorName, colorValue]) =>
+ h("div", {
+ className: "colorPickerItem",
+ style: `background-color: ${colorValue.backgroundColor};
+ color: ${colorValue.textColor || "inherit"};`,
+ innerHTML: colorValue?.icon || "",
+ onClick: () => onSelectColor(colorValue),
+ })
+ )
),
- ]
- )
- )
+ h("div", { className: "divider" }),
+ ]);
+ } else {
+ return h(
+ "div",
+ {
+ className: "toolboxItem",
+ itemType: "div",
+ onClick: () => onClickItem(item),
+ },
+ [
+ h("div", { className: "iconContainer", innerHTML: item.icon }),
+ h("div", { className: "label" }, item.label),
+ ]
+ );
+ }
+ })
),
- onHidden: (instance) => {
- instance.destroy();
- },
- showOnCreate: true,
...tippyOptions,
});
- return colorPicker;
+ return Array.isArray(toolbox) ? toolbox[0] : toolbox;
}
export class TableView implements NodeView {
@@ -347,10 +313,27 @@ export class TableView implements NodeView {
this.rowsControl,
this.columnsControl
);
+ const columnColors = {
+ Blue: { backgroundColor: "#D9E4FF", textColor: "#171717" },
+ Orange: { backgroundColor: "#FFEDD5", textColor: "#171717" },
+ Grey: { backgroundColor: "#F1F1F1", textColor: "#171717" },
+ Yellow: { backgroundColor: "#FEF3C7", textColor: "#171717" },
+ Green: { backgroundColor: "#DCFCE7", textColor: "#171717" },
+ Red: { backgroundColor: "#FFDDDD", textColor: "#171717" },
+ Pink: { backgroundColor: "#FFE8FA", textColor: "#171717" },
+ Purple: { backgroundColor: "#E8DAFB", textColor: "#171717" },
+ None: {
+ backgroundColor: "none",
+ textColor: "none",
+ icon: ` `,
+ },
+ };
this.columnsToolbox = createToolbox({
triggerButton: this.columnsControl.querySelector(".columnsControlDiv"),
items: columnsToolboxItems,
+ colors: columnColors,
+ onSelectColor: (color) => setCellsBackgroundColor(this.editor, color),
tippyOptions: {
...defaultTippyOptions,
appendTo: this.controls,
@@ -368,10 +351,12 @@ export class TableView implements NodeView {
this.rowsToolbox = createToolbox({
triggerButton: this.rowsControl.firstElementChild,
items: rowsToolboxItems,
+ colors: columnColors,
tippyOptions: {
...defaultTippyOptions,
appendTo: this.controls,
},
+ onSelectColor: (color) => setTableRowBackgroundColor(editor, color),
onClickItem: (item) => {
item.action({
editor: this.editor,
@@ -383,8 +368,6 @@ export class TableView implements NodeView {
});
}
- // Table
-
this.colgroup = h(
"colgroup",
null,
@@ -437,16 +420,19 @@ export class TableView implements NodeView {
}
updateControls() {
- const { hoveredTable: table, hoveredCell: cell } = Object.values(this.decorations).reduce((acc, curr) => {
- if (curr.spec.hoveredCell !== undefined) {
- acc["hoveredCell"] = curr.spec.hoveredCell;
- }
+ const { hoveredTable: table, hoveredCell: cell } = Object.values(this.decorations).reduce(
+ (acc, curr) => {
+ if (curr.spec.hoveredCell !== undefined) {
+ acc["hoveredCell"] = curr.spec.hoveredCell;
+ }
- if (curr.spec.hoveredTable !== undefined) {
- acc["hoveredTable"] = curr.spec.hoveredTable;
- }
- return acc;
-    }, {} as Record<string, any>) as any;
+ if (curr.spec.hoveredTable !== undefined) {
+ acc["hoveredTable"] = curr.spec.hoveredTable;
+ }
+ return acc;
+ },
+      {} as Record<string, any>
+ ) as any;
if (table === undefined || cell === undefined) {
return this.root.classList.add("controls--disabled");
@@ -457,12 +443,12 @@ export class TableView implements NodeView {
const cellDom = this.editor.view.nodeDOM(cell.pos) as HTMLElement;
- if (!this.table) {
+ if (!this.table || !cellDom) {
return;
}
- const tableRect = this.table.getBoundingClientRect();
- const cellRect = cellDom.getBoundingClientRect();
+ const tableRect = this.table?.getBoundingClientRect();
+ const cellRect = cellDom?.getBoundingClientRect();
if (this.columnsControl) {
this.columnsControl.style.left = `${cellRect.left - tableRect.left - this.table.parentElement!.scrollLeft}px`;
diff --git a/packages/editor/core/src/ui/extensions/table/table/table.ts b/packages/editor/core/src/ui/extensions/table/table/table.ts
index 5600fd82a7e..ef595eee209 100644
--- a/packages/editor/core/src/ui/extensions/table/table/table.ts
+++ b/packages/editor/core/src/ui/extensions/table/table/table.ts
@@ -107,10 +107,9 @@ export const Table = Node.create({
addCommands() {
return {
insertTable:
- ({ rows = 3, cols = 3, withHeaderRow = true } = {}) =>
+ ({ rows = 3, cols = 3, withHeaderRow = false } = {}) =>
({ tr, dispatch, editor }) => {
const node = createTable(editor.schema, rows, cols, withHeaderRow);
-
if (dispatch) {
const offset = tr.selection.anchor + 1;
diff --git a/packages/editor/core/src/ui/extensions/typography/index.ts b/packages/editor/core/src/ui/extensions/typography/index.ts
new file mode 100644
index 00000000000..78af3c46e2c
--- /dev/null
+++ b/packages/editor/core/src/ui/extensions/typography/index.ts
@@ -0,0 +1,109 @@
+import { Extension } from "@tiptap/core";
+import {
+ TypographyOptions,
+ emDash,
+ ellipsis,
+ leftArrow,
+ rightArrow,
+ copyright,
+ trademark,
+ servicemark,
+ registeredTrademark,
+ oneHalf,
+ plusMinus,
+ notEqual,
+ laquo,
+ raquo,
+ multiplication,
+ superscriptTwo,
+ superscriptThree,
+ oneQuarter,
+ threeQuarters,
+ impliesArrowRight,
+} from "src/ui/extensions/typography/inputRules";
+
+export const CustomTypographyExtension = Extension.create<TypographyOptions>({
+ name: "typography",
+
+ addInputRules() {
+ const rules = [];
+
+ if (this.options.emDash !== false) {
+ rules.push(emDash(this.options.emDash));
+ }
+
+ if (this.options.impliesArrowRight !== false) {
+ rules.push(impliesArrowRight(this.options.impliesArrowRight));
+ }
+
+ if (this.options.ellipsis !== false) {
+ rules.push(ellipsis(this.options.ellipsis));
+ }
+
+ if (this.options.leftArrow !== false) {
+ rules.push(leftArrow(this.options.leftArrow));
+ }
+
+ if (this.options.rightArrow !== false) {
+ rules.push(rightArrow(this.options.rightArrow));
+ }
+
+ if (this.options.copyright !== false) {
+ rules.push(copyright(this.options.copyright));
+ }
+
+ if (this.options.trademark !== false) {
+ rules.push(trademark(this.options.trademark));
+ }
+
+ if (this.options.servicemark !== false) {
+ rules.push(servicemark(this.options.servicemark));
+ }
+
+ if (this.options.registeredTrademark !== false) {
+ rules.push(registeredTrademark(this.options.registeredTrademark));
+ }
+
+ if (this.options.oneHalf !== false) {
+ rules.push(oneHalf(this.options.oneHalf));
+ }
+
+ if (this.options.plusMinus !== false) {
+ rules.push(plusMinus(this.options.plusMinus));
+ }
+
+ if (this.options.notEqual !== false) {
+ rules.push(notEqual(this.options.notEqual));
+ }
+
+ if (this.options.laquo !== false) {
+ rules.push(laquo(this.options.laquo));
+ }
+
+ if (this.options.raquo !== false) {
+ rules.push(raquo(this.options.raquo));
+ }
+
+ if (this.options.multiplication !== false) {
+ rules.push(multiplication(this.options.multiplication));
+ }
+
+ if (this.options.superscriptTwo !== false) {
+ rules.push(superscriptTwo(this.options.superscriptTwo));
+ }
+
+ if (this.options.superscriptThree !== false) {
+ rules.push(superscriptThree(this.options.superscriptThree));
+ }
+
+ if (this.options.oneQuarter !== false) {
+ rules.push(oneQuarter(this.options.oneQuarter));
+ }
+
+ if (this.options.threeQuarters !== false) {
+ rules.push(threeQuarters(this.options.threeQuarters));
+ }
+
+ return rules;
+ },
+});
diff --git a/packages/editor/core/src/ui/extensions/typography/inputRules.ts b/packages/editor/core/src/ui/extensions/typography/inputRules.ts
new file mode 100644
index 00000000000..f528e92426d
--- /dev/null
+++ b/packages/editor/core/src/ui/extensions/typography/inputRules.ts
@@ -0,0 +1,137 @@
+import { textInputRule } from "@tiptap/core";
+
+export interface TypographyOptions {
+ emDash: false | string;
+ ellipsis: false | string;
+ leftArrow: false | string;
+ rightArrow: false | string;
+ copyright: false | string;
+ trademark: false | string;
+ servicemark: false | string;
+ registeredTrademark: false | string;
+ oneHalf: false | string;
+ plusMinus: false | string;
+ notEqual: false | string;
+ laquo: false | string;
+ raquo: false | string;
+ multiplication: false | string;
+ superscriptTwo: false | string;
+ superscriptThree: false | string;
+ oneQuarter: false | string;
+ threeQuarters: false | string;
+ impliesArrowRight: false | string;
+}
+
+export const emDash = (override?: string) =>
+ textInputRule({
+ find: /--$/,
+ replace: override ?? "—",
+ });
+
+export const impliesArrowRight = (override?: string) =>
+ textInputRule({
+ find: /=>$/,
+ replace: override ?? "⇒",
+ });
+
+export const leftArrow = (override?: string) =>
+ textInputRule({
+ find: /<-$/,
+ replace: override ?? "←",
+ });
+
+export const rightArrow = (override?: string) =>
+ textInputRule({
+ find: /->$/,
+ replace: override ?? "→",
+ });
+
+export const ellipsis = (override?: string) =>
+ textInputRule({
+ find: /\.\.\.$/,
+ replace: override ?? "…",
+ });
+
+export const copyright = (override?: string) =>
+ textInputRule({
+ find: /\(c\)$/,
+ replace: override ?? "©",
+ });
+
+export const trademark = (override?: string) =>
+ textInputRule({
+ find: /\(tm\)$/,
+ replace: override ?? "™",
+ });
+
+export const servicemark = (override?: string) =>
+ textInputRule({
+ find: /\(sm\)$/,
+ replace: override ?? "℠",
+ });
+
+export const registeredTrademark = (override?: string) =>
+ textInputRule({
+ find: /\(r\)$/,
+ replace: override ?? "®",
+ });
+
+export const oneHalf = (override?: string) =>
+ textInputRule({
+ find: /(?:^|\s)(1\/2)\s$/,
+ replace: override ?? "½",
+ });
+
+export const plusMinus = (override?: string) =>
+ textInputRule({
+ find: /\+\/-$/,
+ replace: override ?? "±",
+ });
+
+export const notEqual = (override?: string) =>
+ textInputRule({
+ find: /!=$/,
+ replace: override ?? "≠",
+ });
+
+export const laquo = (override?: string) =>
+ textInputRule({
+ find: /<<$/,
+ replace: override ?? "«",
+ });
+
+export const raquo = (override?: string) =>
+ textInputRule({
+ find: />>$/,
+ replace: override ?? "»",
+ });
+
+export const multiplication = (override?: string) =>
+ textInputRule({
+ find: /\d+\s?([*x])\s?\d+$/,
+ replace: override ?? "×",
+ });
+
+export const superscriptTwo = (override?: string) =>
+ textInputRule({
+ find: /\^2$/,
+ replace: override ?? "²",
+ });
+
+export const superscriptThree = (override?: string) =>
+ textInputRule({
+ find: /\^3$/,
+ replace: override ?? "³",
+ });
+
+export const oneQuarter = (override?: string) =>
+ textInputRule({
+ find: /(?:^|\s)(1\/4)\s$/,
+ replace: override ?? "¼",
+ });
+
+export const threeQuarters = (override?: string) =>
+ textInputRule({
+ find: /(?:^|\s)(3\/4)\s$/,
+ replace: override ?? "¾",
+ });
diff --git a/packages/editor/core/src/ui/props.tsx b/packages/editor/core/src/ui/props.tsx
index 2aaeb4264cb..1846efe4751 100644
--- a/packages/editor/core/src/ui/props.tsx
+++ b/packages/editor/core/src/ui/props.tsx
@@ -42,15 +42,6 @@ export function CoreEditorProps(
return false;
},
handleDrop: (view, event, _slice, moved) => {
- if (typeof window !== "undefined") {
- const selection: any = window?.getSelection();
- if (selection.rangeCount !== 0) {
- const range = selection.getRangeAt(0);
- if (findTableAncestor(range.startContainer)) {
- return;
- }
- }
- }
if (!moved && event.dataTransfer && event.dataTransfer.files && event.dataTransfer.files[0]) {
event.preventDefault();
const file = event.dataTransfer.files[0];
diff --git a/packages/editor/document-editor/package.json b/packages/editor/document-editor/package.json
index b33bc12fbf4..bd1f2d90fea 100644
--- a/packages/editor/document-editor/package.json
+++ b/packages/editor/document-editor/package.json
@@ -1,6 +1,6 @@
{
"name": "@plane/document-editor",
- "version": "0.15.1",
+ "version": "0.16.0",
"description": "Package that powers Plane's Pages Editor",
"main": "./dist/index.mjs",
"module": "./dist/index.mjs",
diff --git a/packages/editor/document-editor/src/ui/components/links/link-edit-view.tsx b/packages/editor/document-editor/src/ui/components/links/link-edit-view.tsx
index 136d04e01e0..97191543991 100644
--- a/packages/editor/document-editor/src/ui/components/links/link-edit-view.tsx
+++ b/packages/editor/document-editor/src/ui/components/links/link-edit-view.tsx
@@ -40,9 +40,11 @@ export const LinkEditView = ({
const [positionRef, setPositionRef] = useState({ from: from, to: to });
const [localUrl, setLocalUrl] = useState(viewProps.url);
- const linkRemoved = useRef();
+ const linkRemoved = useRef();
const getText = (from: number, to: number) => {
+ if (to >= editor.state.doc.content.size) return "";
+
const text = editor.state.doc.textBetween(from, to, "\n");
return text;
};
@@ -72,10 +74,12 @@ export const LinkEditView = ({
const url = isValidUrl(localUrl) ? localUrl : viewProps.url;
+ if (to >= editor.state.doc.content.size) return;
+
editor.view.dispatch(editor.state.tr.removeMark(from, to, editor.schema.marks.link));
editor.view.dispatch(editor.state.tr.addMark(from, to, editor.schema.marks.link.create({ href: url })));
},
- [localUrl]
+ [localUrl, editor, from, to, viewProps.url]
);
const handleUpdateText = (text: string) => {
diff --git a/packages/editor/document-editor/src/ui/extensions/widgets/issue-embed-suggestion-list/issue-suggestion-renderer.tsx b/packages/editor/document-editor/src/ui/extensions/widgets/issue-embed-suggestion-list/issue-suggestion-renderer.tsx
index 869c7a8c6f3..e586bfd80cc 100644
--- a/packages/editor/document-editor/src/ui/extensions/widgets/issue-embed-suggestion-list/issue-suggestion-renderer.tsx
+++ b/packages/editor/document-editor/src/ui/extensions/widgets/issue-embed-suggestion-list/issue-suggestion-renderer.tsx
@@ -145,7 +145,7 @@ const IssueSuggestionList = ({
{sections.map((section) => {
const sectionItems = displayedItems[section];
@@ -175,8 +175,8 @@ const IssueSuggestionList = ({
>
{item.identifier}
-
-
{item.title}
+
))}
diff --git a/packages/editor/document-editor/src/ui/menu/fixed-menu.tsx b/packages/editor/document-editor/src/ui/menu/fixed-menu.tsx
index be57a4a91c5..397e8c576dd 100644
--- a/packages/editor/document-editor/src/ui/menu/fixed-menu.tsx
+++ b/packages/editor/document-editor/src/ui/menu/fixed-menu.tsx
@@ -48,34 +48,12 @@ export const FixedMenu = (props: EditorBubbleMenuProps) => {
function getComplexItems(): BubbleMenuItem[] {
const items: BubbleMenuItem[] = [TableItem(editor)];
- if (shouldShowImageItem()) {
- items.push(ImageItem(editor, uploadFile, setIsSubmitting));
- }
-
+ items.push(ImageItem(editor, uploadFile, setIsSubmitting));
return items;
}
const complexItems: BubbleMenuItem[] = getComplexItems();
- function shouldShowImageItem(): boolean {
- if (typeof window !== "undefined") {
- const selectionRange: any = window?.getSelection();
- const { selection } = props.editor.state;
-
- if (selectionRange.rangeCount !== 0) {
- const range = selectionRange.getRangeAt(0);
- if (findTableAncestor(range.startContainer)) {
- return false;
- }
- if (isCellSelection(selection)) {
- return false;
- }
- }
- return true;
- }
- return false;
- }
-
return (
diff --git a/packages/editor/extensions/package.json b/packages/editor/extensions/package.json
index 8481abdf31b..0bdd70824eb 100644
--- a/packages/editor/extensions/package.json
+++ b/packages/editor/extensions/package.json
@@ -1,6 +1,6 @@
{
"name": "@plane/editor-extensions",
- "version": "0.15.1",
+ "version": "0.16.0",
"description": "Package that powers Plane's Editor with extensions",
"private": true,
"main": "./dist/index.mjs",
diff --git a/packages/editor/extensions/src/extensions/drag-drop.tsx b/packages/editor/extensions/src/extensions/drag-drop.tsx
index af99fec61f6..ce4088413c3 100644
--- a/packages/editor/extensions/src/extensions/drag-drop.tsx
+++ b/packages/editor/extensions/src/extensions/drag-drop.tsx
@@ -35,7 +35,7 @@ export interface DragHandleOptions {
}
function absoluteRect(node: Element) {
- const data = node.getBoundingClientRect();
+ const data = node?.getBoundingClientRect();
return {
top: data.top,
@@ -65,7 +65,7 @@ function nodeDOMAtCoords(coords: { x: number; y: number }) {
}
function nodePosAtDOM(node: Element, view: EditorView) {
- const boundingRect = node.getBoundingClientRect();
+ const boundingRect = node?.getBoundingClientRect();
if (node.nodeName === "IMG") {
return view.posAtCoords({
diff --git a/packages/editor/lite-text-editor/package.json b/packages/editor/lite-text-editor/package.json
index 71d70399dc6..e033f620a6c 100644
--- a/packages/editor/lite-text-editor/package.json
+++ b/packages/editor/lite-text-editor/package.json
@@ -1,6 +1,6 @@
{
"name": "@plane/lite-text-editor",
- "version": "0.15.1",
+ "version": "0.16.0",
"description": "Package that powers Plane's Comment Editor",
"private": true,
"main": "./dist/index.mjs",
diff --git a/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx
index 71ad4e0e1c1..c6786698dd4 100644
--- a/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx
+++ b/packages/editor/lite-text-editor/src/ui/menus/fixed-menu/index.tsx
@@ -60,34 +60,13 @@ export const FixedMenu = (props: EditorBubbleMenuProps) => {
function getComplexItems(): BubbleMenuItem[] {
const items: BubbleMenuItem[] = [TableItem(props.editor)];
- if (shouldShowImageItem()) {
- items.push(ImageItem(props.editor, props.uploadFile, props.setIsSubmitting));
- }
+ items.push(ImageItem(props.editor, props.uploadFile, props.setIsSubmitting));
return items;
}
const complexItems: BubbleMenuItem[] = getComplexItems();
- function shouldShowImageItem(): boolean {
- if (typeof window !== "undefined") {
- const selectionRange: any = window?.getSelection();
- const { selection } = props.editor.state;
-
- if (selectionRange.rangeCount !== 0) {
- const range = selectionRange.getRangeAt(0);
- if (findTableAncestor(range.startContainer)) {
- return false;
- }
- if (isCellSelection(selection)) {
- return false;
- }
- }
- return true;
- }
- return false;
- }
-
const handleAccessChange = (accessKey: string) => {
props.commentAccessSpecifier?.onAccessChange(accessKey);
};
diff --git a/packages/editor/rich-text-editor/package.json b/packages/editor/rich-text-editor/package.json
index a85a8b998e5..0f3d0d8f70e 100644
--- a/packages/editor/rich-text-editor/package.json
+++ b/packages/editor/rich-text-editor/package.json
@@ -1,6 +1,6 @@
{
"name": "@plane/rich-text-editor",
- "version": "0.15.1",
+ "version": "0.16.0",
"description": "Rich Text Editor that powers Plane",
"private": true,
"main": "./dist/index.mjs",
diff --git a/packages/eslint-config-custom/package.json b/packages/eslint-config-custom/package.json
index 6bfe6726172..2fee408c9bc 100644
--- a/packages/eslint-config-custom/package.json
+++ b/packages/eslint-config-custom/package.json
@@ -1,7 +1,7 @@
{
"name": "eslint-config-custom",
"private": true,
- "version": "0.15.1",
+ "version": "0.16.0",
"main": "index.js",
"license": "MIT",
"dependencies": {
diff --git a/packages/tailwind-config-custom/package.json b/packages/tailwind-config-custom/package.json
index 50ede8674b9..d7e807b91f3 100644
--- a/packages/tailwind-config-custom/package.json
+++ b/packages/tailwind-config-custom/package.json
@@ -1,6 +1,6 @@
{
"name": "tailwind-config-custom",
- "version": "0.15.1",
+ "version": "0.16.0",
"description": "common tailwind configuration across monorepo",
"main": "index.js",
"private": true,
diff --git a/packages/tsconfig/package.json b/packages/tsconfig/package.json
index 42ce3fed5d4..e0829e87be1 100644
--- a/packages/tsconfig/package.json
+++ b/packages/tsconfig/package.json
@@ -1,6 +1,6 @@
{
"name": "tsconfig",
- "version": "0.15.1",
+ "version": "0.16.0",
"private": true,
"files": [
"base.json",
diff --git a/packages/types/package.json b/packages/types/package.json
index 0e5c2eb16f0..9c993884533 100644
--- a/packages/types/package.json
+++ b/packages/types/package.json
@@ -1,6 +1,6 @@
{
"name": "@plane/types",
- "version": "0.15.1",
+ "version": "0.16.0",
"private": true,
"main": "./src/index.d.ts"
}
diff --git a/packages/types/src/cycles.d.ts b/packages/types/src/cycles.d.ts
index 5d715385a0e..e7ec66ae212 100644
--- a/packages/types/src/cycles.d.ts
+++ b/packages/types/src/cycles.d.ts
@@ -30,10 +30,9 @@ export interface ICycle {
is_favorite: boolean;
issue: string;
name: string;
- owned_by: string;
+ owned_by_id: string;
progress_snapshot: TProgressSnapshot;
- project: string;
- project_detail: IProjectLite;
+ project_id: string;
status: TCycleGroups;
sort_order: number;
start_date: string | null;
@@ -42,12 +41,11 @@ export interface ICycle {
unstarted_issues: number;
updated_at: Date;
updated_by: string;
- assignees: IUserLite[];
+ assignee_ids: string[];
view_props: {
filters: IIssueFilterOptions;
};
- workspace: string;
- workspace_detail: IWorkspaceLite;
+ workspace_id: string;
}
export type TProgressSnapshot = {
diff --git a/packages/types/src/issues.d.ts b/packages/types/src/issues.d.ts
index 1f4a35dd470..ebe53713814 100644
--- a/packages/types/src/issues.d.ts
+++ b/packages/types/src/issues.d.ts
@@ -58,7 +58,6 @@ export interface IIssueLink {
export interface ILinkDetails {
created_at: Date;
created_by: string;
- created_by_detail: IUserLite;
id: string;
metadata: any;
title: string;
@@ -204,6 +203,8 @@ export interface ViewFlags {
export type GroupByColumnTypes =
| "project"
+ | "cycle"
+ | "module"
| "state"
| "state_detail.group"
| "priority"
diff --git a/packages/types/src/issues/issue.d.ts b/packages/types/src/issues/issue.d.ts
index 527abe63038..42c95dc4e30 100644
--- a/packages/types/src/issues/issue.d.ts
+++ b/packages/types/src/issues/issue.d.ts
@@ -1,4 +1,7 @@
import { TIssuePriorities } from "../issues";
+import { TIssueAttachment } from "./issue_attachment";
+import { TIssueLink } from "./issue_link";
+import { TIssueReaction } from "./issue_reaction";
// new issue structure types
export type TIssue = {
@@ -34,7 +37,12 @@ export type TIssue = {
updated_by: string;
is_draft: boolean;
- is_subscribed: boolean;
+ is_subscribed?: boolean;
+
+  parent?: Partial<TIssue>;
+ issue_reactions?: TIssueReaction[];
+ issue_attachment?: TIssueAttachment[];
+ issue_link?: TIssueLink[];
// tempId is used for optimistic updates. It is not a part of the API response.
tempId?: string;
diff --git a/packages/types/src/issues/issue_attachment.d.ts b/packages/types/src/issues/issue_attachment.d.ts
index 90daa08faeb..7c3819e004d 100644
--- a/packages/types/src/issues/issue_attachment.d.ts
+++ b/packages/types/src/issues/issue_attachment.d.ts
@@ -1,17 +1,15 @@
export type TIssueAttachment = {
id: string;
- created_at: string;
- updated_at: string;
attributes: {
name: string;
size: number;
};
asset: string;
- created_by: string;
+ issue_id: string;
+
+ //need
+ updated_at: string;
updated_by: string;
- project: string;
- workspace: string;
- issue: string;
};
export type TIssueAttachmentMap = {
diff --git a/packages/types/src/issues/issue_link.d.ts b/packages/types/src/issues/issue_link.d.ts
index 2c469e6829b..10f0d279204 100644
--- a/packages/types/src/issues/issue_link.d.ts
+++ b/packages/types/src/issues/issue_link.d.ts
@@ -4,11 +4,13 @@ export type TIssueLinkEditableFields = {
};
export type TIssueLink = TIssueLinkEditableFields & {
- created_at: Date;
- created_by: string;
- created_by_detail: IUserLite;
+ created_by_id: string;
id: string;
metadata: any;
+ issue_id: string;
+
+ //need
+ created_at: Date;
};
export type TIssueLinkMap = {
diff --git a/packages/types/src/issues/issue_reaction.d.ts b/packages/types/src/issues/issue_reaction.d.ts
index 88ef274261a..a4eaee0a876 100644
--- a/packages/types/src/issues/issue_reaction.d.ts
+++ b/packages/types/src/issues/issue_reaction.d.ts
@@ -1,15 +1,8 @@
export type TIssueReaction = {
- actor: string;
- actor_detail: IUserLite;
- created_at: Date;
- created_by: string;
+ actor_id: string;
id: string;
- issue: string;
- project: string;
+ issue_id: string;
reaction: string;
- updated_at: Date;
- updated_by: string;
- workspace: string;
};
export type TIssueReactionMap = {
diff --git a/packages/types/src/modules.d.ts b/packages/types/src/modules.d.ts
index 0e49da7fe07..fcf2d86a21a 100644
--- a/packages/types/src/modules.d.ts
+++ b/packages/types/src/modules.d.ts
@@ -27,16 +27,12 @@ export interface IModule {
labels: TLabelsDistribution[];
};
id: string;
- lead: string | null;
- lead_detail: IUserLite | null;
+ lead_id: string | null;
link_module: ILinkDetails[];
- links_list: ModuleLink[];
- members: string[];
- members_detail: IUserLite[];
+ member_ids: string[];
is_favorite: boolean;
name: string;
- project: string;
- project_detail: IProjectLite;
+ project_id: string;
sort_order: number;
start_date: string | null;
started_issues: number;
@@ -49,8 +45,7 @@ export interface IModule {
view_props: {
filters: IIssueFilterOptions;
};
- workspace: string;
- workspace_detail: IWorkspaceLite;
+ workspace_id: string;
}
export interface ModuleIssueResponse {
diff --git a/packages/types/src/notifications.d.ts b/packages/types/src/notifications.d.ts
index 8033c19a94e..652e2776f51 100644
--- a/packages/types/src/notifications.d.ts
+++ b/packages/types/src/notifications.d.ts
@@ -12,27 +12,27 @@ export interface PaginatedUserNotification {
}
export interface IUserNotification {
- id: string;
- created_at: Date;
- updated_at: Date;
+ archived_at: string | null;
+ created_at: string;
+ created_by: null;
data: Data;
entity_identifier: string;
entity_name: string;
- title: string;
+ id: string;
message: null;
message_html: string;
message_stripped: null;
- sender: string;
+ project: string;
read_at: Date | null;
- archived_at: Date | null;
+ receiver: string;
+ sender: string;
snoozed_till: Date | null;
- created_by: null;
- updated_by: null;
- workspace: string;
- project: string;
+ title: string;
triggered_by: string;
triggered_by_details: IUserLite;
- receiver: string;
+ updated_at: Date;
+ updated_by: null;
+ workspace: string;
}
export interface Data {
diff --git a/packages/types/src/view-props.d.ts b/packages/types/src/view-props.d.ts
index 61cc7081b29..c2c98def330 100644
--- a/packages/types/src/view-props.d.ts
+++ b/packages/types/src/view-props.d.ts
@@ -14,6 +14,8 @@ export type TIssueGroupByOptions =
| "project"
| "assignees"
| "mentions"
+ | "cycle"
+ | "module"
| null;
export type TIssueOrderByOptions =
@@ -30,6 +32,10 @@ export type TIssueOrderByOptions =
| "-assignees__first_name"
| "labels__name"
| "-labels__name"
+ | "modules__name"
+ | "-modules__name"
+ | "cycle__name"
+ | "-cycle__name"
| "target_date"
| "-target_date"
| "estimate_point"
@@ -56,6 +62,8 @@ export type TIssueParams =
| "created_by"
| "subscriber"
| "labels"
+ | "cycle"
+ | "module"
| "start_date"
| "target_date"
| "project"
@@ -75,6 +83,8 @@ export interface IIssueFilterOptions {
labels?: string[] | null;
priority?: string[] | null;
project?: string[] | null;
+ cycle?: string[] | null;
+ module?: string[] | null;
start_date?: string[] | null;
state?: string[] | null;
state_group?: string[] | null;
@@ -109,6 +119,8 @@ export interface IIssueDisplayProperties {
estimate?: boolean;
created_on?: boolean;
updated_on?: boolean;
+ modules?: boolean;
+ cycle?: boolean;
}
export type TIssueKanbanFilters = {
diff --git a/packages/ui/package.json b/packages/ui/package.json
index 912fcfeb8fb..756a0f2f1be 100644
--- a/packages/ui/package.json
+++ b/packages/ui/package.json
@@ -2,7 +2,7 @@
"name": "@plane/ui",
"description": "UI components shared across multiple apps internally",
"private": true,
- "version": "0.15.1",
+ "version": "0.16.0",
"main": "./dist/index.js",
"module": "./dist/index.mjs",
"types": "./dist/index.d.ts",
diff --git a/packages/ui/src/control-link/control-link.tsx b/packages/ui/src/control-link/control-link.tsx
index dbdbaf0950a..ee4b66d7b88 100644
--- a/packages/ui/src/control-link/control-link.tsx
+++ b/packages/ui/src/control-link/control-link.tsx
@@ -5,10 +5,11 @@ export type TControlLink = React.AnchorHTMLAttributes & {
onClick: () => void;
children: React.ReactNode;
target?: string;
+ disabled?: boolean;
};
export const ControlLink: React.FC = (props) => {
- const { href, onClick, children, target = "_self", ...rest } = props;
+ const { href, onClick, children, target = "_self", disabled = false, ...rest } = props;
const LEFT_CLICK_EVENT_CODE = 0;
const _onClick = (event: React.MouseEvent) => {
@@ -19,6 +20,8 @@ export const ControlLink: React.FC = (props) => {
}
};
+  if (disabled) return <>{children}</>;
+
return (
{children}
diff --git a/packages/ui/src/dropdowns/custom-menu.tsx b/packages/ui/src/dropdowns/custom-menu.tsx
index 37aba932a59..d1623dddfdb 100644
--- a/packages/ui/src/dropdowns/custom-menu.tsx
+++ b/packages/ui/src/dropdowns/custom-menu.tsx
@@ -27,6 +27,7 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => {
noBorder = false,
noChevron = false,
optionsClassName = "",
+ menuItemsClassName = "",
verticalEllipsis = false,
portalElement,
menuButtonOnClick,
@@ -70,7 +71,7 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => {
useOutsideClickDetector(dropdownRef, closeDropdown);
let menuItems = (
-
+
{
};
const MenuItem: React.FC
= (props) => {
- const { children, onClick, className = "" } = props;
+ const { children, disabled = false, onClick, className } = props;
return (
-
+
{({ active, close }) => (
= (props) => {
close();
onClick && onClick(e);
}}
+ disabled={disabled}
>
{children}
diff --git a/packages/ui/src/dropdowns/custom-select.tsx b/packages/ui/src/dropdowns/custom-select.tsx
index 0fa183cb2ce..37608ea8db6 100644
--- a/packages/ui/src/dropdowns/custom-select.tsx
+++ b/packages/ui/src/dropdowns/custom-select.tsx
@@ -122,7 +122,7 @@ const Option = (props: ICustomSelectItemProps) => {
value={value}
className={({ active }) =>
cn(
- "cursor-pointer select-none truncate rounded px-1 py-1.5 text-custom-text-200",
+ "cursor-pointer select-none truncate rounded px-1 py-1.5 text-custom-text-200 flex items-center justify-between gap-2",
{
"bg-custom-background-80": active,
},
@@ -131,10 +131,10 @@ const Option = (props: ICustomSelectItemProps) => {
}
>
{({ selected }) => (
-
-
{children}
+ <>
+ {children}
{selected &&
}
-
+          </>
)}
);
diff --git a/packages/ui/src/dropdowns/helper.tsx b/packages/ui/src/dropdowns/helper.tsx
index 930f332b9c0..93ac63b97b4 100644
--- a/packages/ui/src/dropdowns/helper.tsx
+++ b/packages/ui/src/dropdowns/helper.tsx
@@ -24,6 +24,7 @@ export interface ICustomMenuDropdownProps extends IDropdownProps {
noBorder?: boolean;
verticalEllipsis?: boolean;
menuButtonOnClick?: (...args: any) => void;
+ menuItemsClassName?: string;
onMenuClose?: () => void;
closeOnSelect?: boolean;
portalElement?: Element | null;
@@ -64,6 +65,7 @@ export type ICustomSearchSelectProps = IDropdownProps &
export interface ICustomMenuItemProps {
children: React.ReactNode;
+ disabled?: boolean;
onClick?: (args?: any) => void;
className?: string;
}
diff --git a/space/package.json b/space/package.json
index 9ee7279cd41..a1d600a60bf 100644
--- a/space/package.json
+++ b/space/package.json
@@ -1,6 +1,6 @@
{
"name": "space",
- "version": "0.15.1",
+ "version": "0.16.0",
"private": true,
"scripts": {
"dev": "turbo run develop",
diff --git a/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx b/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx
index ee677fe91e1..6a7b3c7b9d0 100644
--- a/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx
+++ b/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx
@@ -20,7 +20,8 @@ export const CustomAnalyticsSidebarHeader = observer(() => {
const cycleDetails = cycleId ? getCycleById(cycleId.toString()) : undefined;
const moduleDetails = moduleId ? getModuleById(moduleId.toString()) : undefined;
const projectDetails = projectId ? getProjectById(projectId.toString()) : undefined;
- const cycleOwnerDetails = cycleDetails ? getUserDetails(cycleDetails.owned_by) : undefined;
+ const cycleOwnerDetails = cycleDetails ? getUserDetails(cycleDetails.owned_by_id) : undefined;
+ const moduleLeadDetails = moduleDetails && moduleDetails.lead_id ? getUserDetails(moduleDetails.lead_id) : undefined;
return (
<>
@@ -57,7 +58,7 @@ export const CustomAnalyticsSidebarHeader = observer(() => {
Lead
- {moduleDetails.lead_detail?.display_name}
+ {moduleLeadDetails && {moduleLeadDetails?.display_name} }
Start Date
diff --git a/web/components/analytics/custom-analytics/sidebar/sidebar.tsx b/web/components/analytics/custom-analytics/sidebar/sidebar.tsx
index c2e12dc3c2c..3ad2805f28f 100644
--- a/web/components/analytics/custom-analytics/sidebar/sidebar.tsx
+++ b/web/components/analytics/custom-analytics/sidebar/sidebar.tsx
@@ -5,7 +5,7 @@ import { mutate } from "swr";
// services
import { AnalyticsService } from "services/analytics.service";
// hooks
-import { useCycle, useModule, useProject, useUser } from "hooks/store";
+import { useCycle, useModule, useProject, useUser, useWorkspace } from "hooks/store";
import useToast from "hooks/use-toast";
// components
import { CustomAnalyticsSidebarHeader, CustomAnalyticsSidebarProjectsList } from "components/analytics";
@@ -39,6 +39,8 @@ export const CustomAnalyticsSidebar: React.FC
= observer((props) => {
// store hooks
const { currentUser } = useUser();
const { workspaceProjectIds, getProjectById } = useProject();
+ const { getWorkspaceById } = useWorkspace();
+
const { fetchCycleDetails, getCycleById } = useCycle();
const { fetchModuleDetails, getModuleById } = useModule();
@@ -70,11 +72,14 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => {
if (cycleDetails || moduleDetails) {
const details = cycleDetails || moduleDetails;
- eventPayload.workspaceId = details?.workspace_detail?.id;
- eventPayload.workspaceName = details?.workspace_detail?.name;
- eventPayload.projectId = details?.project_detail.id;
- eventPayload.projectIdentifier = details?.project_detail.identifier;
- eventPayload.projectName = details?.project_detail.name;
+ const currentProjectDetails = getProjectById(details?.project_id || "");
+ const currentWorkspaceDetails = getWorkspaceById(details?.workspace_id || "");
+
+ eventPayload.workspaceId = details?.workspace_id;
+ eventPayload.workspaceName = currentWorkspaceDetails?.name;
+ eventPayload.projectId = details?.project_id;
+ eventPayload.projectIdentifier = currentProjectDetails?.identifier;
+ eventPayload.projectName = currentProjectDetails?.name;
}
if (cycleDetails) {
@@ -138,14 +143,18 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => {
const selectedProjects = params.project && params.project.length > 0 ? params.project : workspaceProjectIds;
-
return (
-
- {analytics ? analytics.total : "..."}
Issues
+ {analytics ? analytics.total : "..."}
+
Issues
{isProjectLevel && (
@@ -154,8 +163,8 @@ export const CustomAnalyticsSidebar: React.FC
= observer((props) => {
(cycleId
? cycleDetails?.created_at
: moduleId
- ? moduleDetails?.created_at
- : projectDetails?.created_at) ?? ""
+ ? moduleDetails?.created_at
+ : projectDetails?.created_at) ?? ""
)}
)}
diff --git a/web/components/analytics/scope-and-demand/scope-and-demand.tsx b/web/components/analytics/scope-and-demand/scope-and-demand.tsx
index 0f9e2c712d8..6f26ad73ff9 100644
--- a/web/components/analytics/scope-and-demand/scope-and-demand.tsx
+++ b/web/components/analytics/scope-and-demand/scope-and-demand.tsx
@@ -47,7 +47,7 @@ export const ScopeAndDemand: React.FC
= (props) => {
<>
{!defaultAnalyticsError ? (
defaultAnalytics ? (
-
+
diff --git a/web/components/api-token/modal/form.tsx b/web/components/api-token/modal/form.tsx
index ae7717b3933..77753e64d20 100644
--- a/web/components/api-token/modal/form.tsx
+++ b/web/components/api-token/modal/form.tsx
@@ -1,11 +1,10 @@
import { useState } from "react";
import { add } from "date-fns";
import { Controller, useForm } from "react-hook-form";
+import { DateDropdown } from "components/dropdowns";
import { Calendar } from "lucide-react";
// hooks
import useToast from "hooks/use-toast";
-// components
-import { CustomDatePicker } from "components/ui";
// ui
import { Button, CustomSelect, Input, TextArea, ToggleSwitch } from "@plane/ui";
// helpers
@@ -167,7 +166,7 @@ export const CreateApiTokenForm: React.FC
= (props) => {
@@ -194,20 +193,13 @@ export const CreateApiTokenForm: React.FC = (props) => {
}}
/>
{watch("expired_at") === "custom" && (
- setCustomDate(date ? new Date(date) : null)}
+ onChange={(date) => setCustomDate(date)}
minDate={tomorrow}
- customInput={
-
-
- {customDate ? renderFormattedDate(customDate) : "Set date"}
-
- }
+ icon={ }
+ buttonVariant="border-with-text"
+ placeholder="Set date"
disabled={neverExpires}
/>
)}
diff --git a/web/components/automation/auto-archive-automation.tsx b/web/components/automation/auto-archive-automation.tsx
index 974efff3a1a..d871b64d047 100644
--- a/web/components/automation/auto-archive-automation.tsx
+++ b/web/components/automation/auto-archive-automation.tsx
@@ -48,7 +48,7 @@ export const AutoArchiveAutomation: React.FC = observer((props) => {
Auto-archive closed issues
- Plane will auto archive issues that have been completed or cancelled.
+ Plane will auto archive issues that have been completed or canceled.
@@ -73,7 +73,7 @@ export const AutoArchiveAutomation: React.FC
= observer((props) => {
{
handleChange({ archive_in: val });
@@ -93,7 +93,7 @@ export const AutoArchiveAutomation: React.FC = observer((props) => {
className="flex w-full select-none items-center rounded px-1 py-1.5 text-sm text-custom-text-200 hover:bg-custom-background-80"
onClick={() => setmonthModal(true)}
>
- Customise Time Range
+ Customize time range
>
diff --git a/web/components/automation/auto-close-automation.tsx b/web/components/automation/auto-close-automation.tsx
index 8d6662c112d..2ae4d1f9c65 100644
--- a/web/components/automation/auto-close-automation.tsx
+++ b/web/components/automation/auto-close-automation.tsx
@@ -74,7 +74,7 @@ export const AutoCloseAutomation: React.FC = observer((props) => {
Auto-close issues
- Plane will automatically close issue that haven{"'"}t been completed or cancelled.
+ Plane will automatically close issues that haven{"'"}t been completed or canceled.
@@ -100,7 +100,7 @@ export const AutoCloseAutomation: React.FC
= observer((props) => {
{
handleChange({ close_in: val });
@@ -119,7 +119,7 @@ export const AutoCloseAutomation: React.FC = observer((props) => {
className="flex w-full select-none items-center rounded px-1 py-1.5 text-custom-text-200 hover:bg-custom-background-80"
onClick={() => setmonthModal(true)}
>
- Customize Time Range
+ Customize time range
>
diff --git a/web/components/automation/select-month-modal.tsx b/web/components/automation/select-month-modal.tsx
index 1d306bb0401..01d07f64a82 100644
--- a/web/components/automation/select-month-modal.tsx
+++ b/web/components/automation/select-month-modal.tsx
@@ -72,7 +72,7 @@ export const SelectMonthModal: React.FC = ({ type, initialValues, isOpen,