diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
index 38694a62ea5..603f08e9422 100644
--- a/.github/workflows/build-branch.yml
+++ b/.github/workflows/build-branch.yml
@@ -2,11 +2,6 @@ name: Branch Build
on:
workflow_dispatch:
- inputs:
- branch_name:
- description: "Branch Name"
- required: true
- default: "preview"
push:
branches:
- master
@@ -16,49 +11,71 @@ on:
types: [released, prereleased]
env:
- TARGET_BRANCH: ${{ inputs.branch_name || github.ref_name || github.event.release.target_commitish }}
+ TARGET_BRANCH: ${{ github.ref_name || github.event.release.target_commitish }}
jobs:
branch_build_setup:
name: Build-Push Web/Space/API/Proxy Docker Image
- runs-on: ubuntu-20.04
- steps:
- - name: Check out the repo
- uses: actions/checkout@v3.3.0
+ runs-on: ubuntu-latest
outputs:
- gh_branch_name: ${{ env.TARGET_BRANCH }}
+ gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+ gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
+ gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
+ gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
+ gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
+
+ steps:
+ - id: set_env_variables
+ name: Set Environment Variables
+ run: |
+ if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
+ echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
+ echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
+ echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
+ echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
+ else
+ echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+ echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+ echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+ echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+ fi
+ echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
branch_build_push_frontend:
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Frontend Docker Tag
run: |
- if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
- elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable
else
TAG=${{ env.FRONTEND_TAG }}
fi
echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
- - name: Docker Setup QEMU
- uses: docker/setup-qemu-action@v3.0.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.0.0
- with:
- platforms: linux/amd64,linux/arm64
- buildkitd-flags: "--allow-insecure-entitlement security.insecure"
- name: Login to Docker Hub
- uses: docker/login-action@v3.0.0
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ driver: ${{ env.BUILDX_DRIVER }}
+ version: ${{ env.BUILDX_VERSION }}
+ endpoint: ${{ env.BUILDX_ENDPOINT }}
+
- name: Check out the repo
uses: actions/checkout@v4.1.1
@@ -67,7 +84,7 @@ jobs:
with:
context: .
file: ./web/Dockerfile.web
- platforms: linux/amd64
+ platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.FRONTEND_TAG }}
push: true
env:
@@ -80,33 +97,36 @@ jobs:
needs: [branch_build_setup]
env:
SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Space Docker Tag
run: |
- if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
- elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable
else
TAG=${{ env.SPACE_TAG }}
fi
echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
- - name: Docker Setup QEMU
- uses: docker/setup-qemu-action@v3.0.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.0.0
- with:
- platforms: linux/amd64,linux/arm64
- buildkitd-flags: "--allow-insecure-entitlement security.insecure"
-
- name: Login to Docker Hub
- uses: docker/login-action@v3.0.0
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ driver: ${{ env.BUILDX_DRIVER }}
+ version: ${{ env.BUILDX_VERSION }}
+ endpoint: ${{ env.BUILDX_ENDPOINT }}
+
- name: Check out the repo
uses: actions/checkout@v4.1.1
@@ -115,7 +135,7 @@ jobs:
with:
context: .
file: ./space/Dockerfile.space
- platforms: linux/amd64
+ platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.SPACE_TAG }}
push: true
env:
@@ -128,33 +148,36 @@ jobs:
needs: [branch_build_setup]
env:
BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Backend Docker Tag
run: |
- if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
- elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable
else
TAG=${{ env.BACKEND_TAG }}
fi
echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
- - name: Docker Setup QEMU
- uses: docker/setup-qemu-action@v3.0.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.0.0
- with:
- platforms: linux/amd64,linux/arm64
- buildkitd-flags: "--allow-insecure-entitlement security.insecure"
-
- name: Login to Docker Hub
- uses: docker/login-action@v3.0.0
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ driver: ${{ env.BUILDX_DRIVER }}
+ version: ${{ env.BUILDX_VERSION }}
+ endpoint: ${{ env.BUILDX_ENDPOINT }}
+
- name: Check out the repo
uses: actions/checkout@v4.1.1
@@ -163,7 +186,7 @@ jobs:
with:
context: ./apiserver
file: ./apiserver/Dockerfile.api
- platforms: linux/amd64
+ platforms: ${{ env.BUILDX_PLATFORMS }}
push: true
tags: ${{ env.BACKEND_TAG }}
env:
@@ -171,38 +194,42 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
branch_build_push_proxy:
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ TARGET_BRANCH: ${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
steps:
- name: Set Proxy Docker Tag
run: |
- if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
- elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable
else
TAG=${{ env.PROXY_TAG }}
fi
echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
- - name: Docker Setup QEMU
- uses: docker/setup-qemu-action@v3.0.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v3.0.0
- with:
- platforms: linux/amd64,linux/arm64
- buildkitd-flags: "--allow-insecure-entitlement security.insecure"
-
- name: Login to Docker Hub
- uses: docker/login-action@v3.0.0
+ uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ driver: ${{ env.BUILDX_DRIVER }}
+ version: ${{ env.BUILDX_VERSION }}
+ endpoint: ${{ env.BUILDX_ENDPOINT }}
+
- name: Check out the repo
uses: actions/checkout@v4.1.1
@@ -211,10 +238,11 @@ jobs:
with:
context: ./nginx
file: ./nginx/Dockerfile
- platforms: linux/amd64
+ platforms: ${{ env.BUILDX_PLATFORMS }}
tags: ${{ env.PROXY_TAG }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
diff --git a/apiserver/package.json b/apiserver/package.json
index 120314ed398..fb4f8441d43 100644
--- a/apiserver/package.json
+++ b/apiserver/package.json
@@ -1,4 +1,4 @@
{
"name": "plane-api",
- "version": "0.15.0"
+ "version": "0.15.1"
}
diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py
index b069ef78c1a..edb89f9b187 100644
--- a/apiserver/plane/api/views/base.py
+++ b/apiserver/plane/api/views/base.py
@@ -1,6 +1,8 @@
# Python imports
import zoneinfo
import json
+from urllib.parse import urlparse
+
# Django imports
from django.conf import settings
@@ -51,6 +53,11 @@ def finalize_response(self, request, response, *args, **kwargs):
and self.request.method in ["POST", "PATCH", "DELETE"]
and response.status_code in [200, 201, 204]
):
+ url = request.build_absolute_uri()
+ parsed_url = urlparse(url)
+ # Extract the scheme and netloc
+ scheme = parsed_url.scheme
+ netloc = parsed_url.netloc
# Push the object to delay
send_webhook.delay(
event=self.webhook_event,
@@ -59,6 +66,7 @@ def finalize_response(self, request, response, *args, **kwargs):
action=self.request.method,
slug=self.workspace_slug,
bulk=self.bulk,
+ current_site=f"{scheme}://{netloc}",
)
return response
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index c296bb11180..6f66c373ec6 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -243,6 +243,29 @@ def post(self, request, slug, project_id):
):
serializer = CycleSerializer(data=request.data)
if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Cycle.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ cycle = Cycle.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).first()
+ return Response(
+ {
+ "error": "Cycle with the same external id and external source already exists",
+ "id": str(cycle.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
serializer.save(
project_id=project_id,
owned_by=request.user,
@@ -289,6 +312,23 @@ def patch(self, request, slug, project_id, pk):
serializer = CycleSerializer(cycle, data=request.data, partial=True)
if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and (cycle.external_id != request.data.get("external_id"))
+ and Cycle.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", cycle.external_source),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Cycle with the same external id and external source already exists",
+ "id": str(cycle.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index e91f2a5f66f..a759b15f6e0 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -220,6 +220,30 @@ def post(self, request, slug, project_id):
)
if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Issue.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ issue = Issue.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "Issue with the same external id and external source already exists",
+ "id": str(issue.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
serializer.save()
# Track the issue
@@ -256,6 +280,26 @@ def patch(self, request, slug, project_id, pk=None):
partial=True,
)
if serializer.is_valid():
+ if (
+                request.data.get("external_id")
+ and (issue.external_id != str(request.data.get("external_id")))
+ and Issue.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get(
+ "external_source", issue.external_source
+ ),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Issue with the same external id and external source already exists",
+ "id": str(issue.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
serializer.save()
issue_activity.delay(
type="issue.activity.updated",
@@ -263,6 +307,8 @@ def patch(self, request, slug, project_id, pk=None):
actor_id=str(request.user.id),
issue_id=str(pk),
project_id=str(project_id),
+                # NOTE(review): dropped stray filter-style kwargs (external_id__isnull /
+                # external_source__isnull) — issue_activity.delay() does not accept them
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
)
@@ -318,6 +364,30 @@ def post(self, request, slug, project_id):
try:
serializer = LabelSerializer(data=request.data)
if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Label.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ label = Label.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "Label with the same external id and external source already exists",
+ "id": str(label.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
serializer.save(project_id=project_id)
return Response(
serializer.data, status=status.HTTP_201_CREATED
@@ -326,11 +396,17 @@ def post(self, request, slug, project_id):
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
except IntegrityError:
+ label = Label.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ name=request.data.get("name"),
+ ).first()
return Response(
{
- "error": "Label with the same name already exists in the project"
+ "error": "Label with the same name already exists in the project",
+ "id": str(label.id),
},
- status=status.HTTP_400_BAD_REQUEST,
+ status=status.HTTP_409_CONFLICT,
)
def get(self, request, slug, project_id, pk=None):
@@ -357,6 +433,25 @@ def patch(self, request, slug, project_id, pk=None):
label = self.get_queryset().get(pk=pk)
serializer = LabelSerializer(label, data=request.data, partial=True)
if serializer.is_valid():
+ if (
+                request.data.get("external_id")
+ and (label.external_id != str(request.data.get("external_id")))
+                and Label.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get(
+ "external_source", label.external_source
+ ),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Label with the same external id and external source already exists",
+ "id": str(label.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index 1a9a21a3c19..d509a53c79d 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -132,6 +132,29 @@ def post(self, request, slug, project_id):
},
)
if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Module.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ module = Module.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).first()
+ return Response(
+ {
+ "error": "Module with the same external id and external source already exists",
+ "id": str(module.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
serializer.save()
module = Module.objects.get(pk=serializer.data["id"])
serializer = ModuleSerializer(module)
@@ -149,8 +172,25 @@ def patch(self, request, slug, project_id, pk):
partial=True,
)
if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and (module.external_id != request.data.get("external_id"))
+ and Module.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", module.external_source),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Module with the same external id and external source already exists",
+ "id": str(module.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
serializer.save()
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def get(self, request, slug, project_id, pk=None):
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index f931c2ed264..0a262a071d4 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -38,6 +38,30 @@ def post(self, request, slug, project_id):
data=request.data, context={"project_id": project_id}
)
if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and State.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ state = State.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "State with the same external id and external source already exists",
+ "id": str(state.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -91,6 +115,23 @@ def patch(self, request, slug, project_id, state_id=None):
)
serializer = StateSerializer(state, data=request.data, partial=True)
if serializer.is_valid():
+ if (
+                request.data.get("external_id")
+ and (state.external_id != str(request.data.get("external_id")))
+ and State.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", state.external_source),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "State with the same external id and external source already exists",
+ "id": str(state.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py
index 0d72f919241..28e88106031 100644
--- a/apiserver/plane/app/serializers/__init__.py
+++ b/apiserver/plane/app/serializers/__init__.py
@@ -68,6 +68,7 @@
IssueRelationSerializer,
RelatedIssueSerializer,
IssuePublicSerializer,
+ IssueDetailSerializer,
)
from .module import (
diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py
index be98bc312eb..90069bd41bd 100644
--- a/apiserver/plane/app/serializers/issue.py
+++ b/apiserver/plane/app/serializers/issue.py
@@ -586,7 +586,6 @@ class Meta:
"id",
"name",
"state_id",
- "description_html",
"sort_order",
"completed_at",
"estimate_point",
@@ -618,6 +617,13 @@ def get_module_ids(self, obj):
return [module for module in obj.issue_module.values_list("module_id", flat=True)]
+class IssueDetailSerializer(IssueSerializer):
+ description_html = serializers.CharField()
+
+ class Meta(IssueSerializer.Meta):
+ fields = IssueSerializer.Meta.fields + ['description_html']
+
+
class IssueLiteSerializer(DynamicBaseSerializer):
workspace_detail = WorkspaceLiteSerializer(
read_only=True, source="workspace"
diff --git a/apiserver/plane/app/views/auth_extended.py b/apiserver/plane/app/views/auth_extended.py
index 501f4765788..29cb43e3865 100644
--- a/apiserver/plane/app/views/auth_extended.py
+++ b/apiserver/plane/app/views/auth_extended.py
@@ -401,8 +401,8 @@ def post(self, request):
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
- event_name="SIGN_IN",
- medium="MAGIC_LINK",
+ event_name="Sign up",
+ medium="Magic link",
first_time=True,
)
key, token, current_attempt = generate_magic_token(email=email)
@@ -438,8 +438,8 @@ def post(self, request):
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
- event_name="SIGN_IN",
- medium="MAGIC_LINK",
+ event_name="Sign in",
+ medium="Magic link",
first_time=False,
)
@@ -468,8 +468,8 @@ def post(self, request):
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
- event_name="SIGN_IN",
- medium="EMAIL",
+ event_name="Sign in",
+ medium="Email",
first_time=False,
)
diff --git a/apiserver/plane/app/views/authentication.py b/apiserver/plane/app/views/authentication.py
index a41200d61a0..c2b3e0b7e4e 100644
--- a/apiserver/plane/app/views/authentication.py
+++ b/apiserver/plane/app/views/authentication.py
@@ -274,8 +274,8 @@ def post(self, request):
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
- event_name="SIGN_IN",
- medium="EMAIL",
+ event_name="Sign in",
+ medium="Email",
first_time=False,
)
@@ -349,8 +349,8 @@ def post(self, request):
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
- event_name="SIGN_IN",
- medium="MAGIC_LINK",
+ event_name="Sign in",
+ medium="Magic link",
first_time=False,
)
diff --git a/apiserver/plane/app/views/base.py b/apiserver/plane/app/views/base.py
index e07cb811cc8..fa1e7559b06 100644
--- a/apiserver/plane/app/views/base.py
+++ b/apiserver/plane/app/views/base.py
@@ -64,6 +64,7 @@ def finalize_response(self, request, response, *args, **kwargs):
action=self.request.method,
slug=self.workspace_slug,
bulk=self.bulk,
+ current_site=request.META.get("HTTP_ORIGIN"),
)
return response
diff --git a/apiserver/plane/app/views/cycle.py b/apiserver/plane/app/views/cycle.py
index 23a227fefb7..63d8d28aea6 100644
--- a/apiserver/plane/app/views/cycle.py
+++ b/apiserver/plane/app/views/cycle.py
@@ -20,6 +20,7 @@
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
+from django.core.serializers.json import DjangoJSONEncoder
# Third party imports
from rest_framework.response import Response
@@ -242,13 +243,13 @@ def list(self, request, slug, project_id):
.values("display_name", "assignee_id", "avatar")
.annotate(
total_issues=Count(
- "assignee_id",
+ "id",
filter=Q(archived_at__isnull=True, is_draft=False),
),
)
.annotate(
completed_issues=Count(
- "assignee_id",
+ "id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
@@ -258,7 +259,7 @@ def list(self, request, slug, project_id):
)
.annotate(
pending_issues=Count(
- "assignee_id",
+ "id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
@@ -281,13 +282,13 @@ def list(self, request, slug, project_id):
.values("label_name", "color", "label_id")
.annotate(
total_issues=Count(
- "label_id",
+ "id",
filter=Q(archived_at__isnull=True, is_draft=False),
)
)
.annotate(
completed_issues=Count(
- "label_id",
+ "id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
@@ -297,7 +298,7 @@ def list(self, request, slug, project_id):
)
.annotate(
pending_issues=Count(
- "label_id",
+ "id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
@@ -312,6 +313,7 @@ def list(self, request, slug, project_id):
"labels": label_distribution,
"completion_chart": {},
}
+
if data[0]["start_date"] and data[0]["end_date"]:
data[0]["distribution"][
"completion_chart"
@@ -419,13 +421,13 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
total_issues=Count(
- "assignee_id",
+ "id",
filter=Q(archived_at__isnull=True, is_draft=False),
),
)
.annotate(
completed_issues=Count(
- "assignee_id",
+ "id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
@@ -435,7 +437,7 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
pending_issues=Count(
- "assignee_id",
+ "id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
@@ -459,13 +461,13 @@ def retrieve(self, request, slug, project_id, pk):
.values("label_name", "color", "label_id")
.annotate(
total_issues=Count(
- "label_id",
+ "id",
filter=Q(archived_at__isnull=True, is_draft=False),
),
)
.annotate(
completed_issues=Count(
- "label_id",
+ "id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
@@ -475,7 +477,7 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
pending_issues=Count(
- "label_id",
+ "id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
@@ -840,10 +842,230 @@ def post(self, request, slug, project_id, cycle_id):
status=status.HTTP_400_BAD_REQUEST,
)
- new_cycle = Cycle.objects.get(
+ new_cycle = Cycle.objects.filter(
workspace__slug=slug, project_id=project_id, pk=new_cycle_id
+ ).first()
+
+ old_cycle = (
+ Cycle.objects.filter(
+ workspace__slug=slug, project_id=project_id, pk=cycle_id
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ total_estimates=Sum("issue_cycle__issue__estimate_point")
+ )
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ ),
+ )
+ )
+ )
+
+ # Pass the new_cycle queryset to burndown_plot
+ completion_chart = burndown_plot(
+ queryset=old_cycle.first(),
+ slug=slug,
+ project_id=project_id,
+ cycle_id=cycle_id,
+ )
+
+ assignee_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=cycle_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(display_name=F("assignees__display_name"))
+ .annotate(assignee_id=F("assignees__id"))
+ .annotate(avatar=F("assignees__avatar"))
+ .values("display_name", "assignee_id", "avatar")
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ ),
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("display_name")
+ )
+
+ label_distribution = (
+ Issue.objects.filter(
+ issue_cycle__cycle_id=cycle_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ .annotate(label_name=F("labels__name"))
+ .annotate(color=F("labels__color"))
+ .annotate(label_id=F("labels__id"))
+ .values("label_name", "color", "label_id")
+ .annotate(
+ total_issues=Count(
+ "id",
+ filter=Q(archived_at__isnull=True, is_draft=False),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=False,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .annotate(
+ pending_issues=Count(
+ "id",
+ filter=Q(
+ completed_at__isnull=True,
+ archived_at__isnull=True,
+ is_draft=False,
+ ),
+ )
+ )
+ .order_by("label_name")
)
+ assignee_distribution_data = [
+ {
+ "display_name": item["display_name"],
+ "assignee_id": str(item["assignee_id"]) if item["assignee_id"] else None,
+ "avatar": item["avatar"],
+ "total_issues": item["total_issues"],
+ "completed_issues": item["completed_issues"],
+ "pending_issues": item["pending_issues"],
+ }
+ for item in assignee_distribution
+ ]
+
+ label_distribution_data = [
+ {
+ "label_name": item["label_name"],
+ "color": item["color"],
+ "label_id": str(item["label_id"]) if item["label_id"] else None,
+ "total_issues": item["total_issues"],
+ "completed_issues": item["completed_issues"],
+ "pending_issues": item["pending_issues"],
+ }
+ for item in label_distribution
+ ]
+
+ current_cycle = Cycle.objects.filter(
+ workspace__slug=slug, project_id=project_id, pk=cycle_id
+ ).first()
+
+ current_cycle.progress_snapshot = {
+ "total_issues": old_cycle.first().total_issues,
+ "completed_issues": old_cycle.first().completed_issues,
+ "cancelled_issues": old_cycle.first().cancelled_issues,
+ "started_issues": old_cycle.first().started_issues,
+ "unstarted_issues": old_cycle.first().unstarted_issues,
+ "backlog_issues": old_cycle.first().backlog_issues,
+ "total_estimates": old_cycle.first().total_estimates,
+ "completed_estimates": old_cycle.first().completed_estimates,
+ "started_estimates": old_cycle.first().started_estimates,
+ "distribution":{
+ "labels": label_distribution_data,
+ "assignees": assignee_distribution_data,
+ "completion_chart": completion_chart,
+ },
+ }
+ current_cycle.save(update_fields=["progress_snapshot"])
+
if (
new_cycle.end_date is not None
and new_cycle.end_date < timezone.now().date()
diff --git a/apiserver/plane/app/views/dashboard.py b/apiserver/plane/app/views/dashboard.py
index 47fae2c9ca1..1366a2886a9 100644
--- a/apiserver/plane/app/views/dashboard.py
+++ b/apiserver/plane/app/views/dashboard.py
@@ -145,6 +145,23 @@ def dashboard_assigned_issues(self, request, slug):
)
).order_by("priority_order")
+ if issue_type == "pending":
+ pending_issues_count = assigned_issues.filter(
+ state__group__in=["backlog", "started", "unstarted"]
+ ).count()
+ pending_issues = assigned_issues.filter(
+ state__group__in=["backlog", "started", "unstarted"]
+ )[:5]
+ return Response(
+ {
+ "issues": IssueSerializer(
+ pending_issues, many=True, expand=self.expand
+ ).data,
+ "count": pending_issues_count,
+ },
+ status=status.HTTP_200_OK,
+ )
+
if issue_type == "completed":
completed_issues_count = assigned_issues.filter(
state__group__in=["completed"]
@@ -257,6 +274,23 @@ def dashboard_created_issues(self, request, slug):
)
).order_by("priority_order")
+ if issue_type == "pending":
+ pending_issues_count = created_issues.filter(
+ state__group__in=["backlog", "started", "unstarted"]
+ ).count()
+ pending_issues = created_issues.filter(
+ state__group__in=["backlog", "started", "unstarted"]
+ )[:5]
+ return Response(
+ {
+ "issues": IssueSerializer(
+ pending_issues, many=True, expand=self.expand
+ ).data,
+ "count": pending_issues_count,
+ },
+ status=status.HTTP_200_OK,
+ )
+
if issue_type == "completed":
completed_issues_count = created_issues.filter(
state__group__in=["completed"]
diff --git a/apiserver/plane/app/views/issue.py b/apiserver/plane/app/views/issue.py
index 0b5c612d399..c8845150a52 100644
--- a/apiserver/plane/app/views/issue.py
+++ b/apiserver/plane/app/views/issue.py
@@ -50,6 +50,7 @@
CommentReactionSerializer,
IssueRelationSerializer,
RelatedIssueSerializer,
+ IssueDetailSerializer,
)
from plane.app.permissions import (
ProjectEntityPermission,
@@ -267,7 +268,7 @@ def create(self, request, slug, project_id):
def retrieve(self, request, slug, project_id, pk=None):
issue = self.get_queryset().filter(pk=pk).first()
return Response(
- IssueSerializer(
+ IssueDetailSerializer(
issue, fields=self.fields, expand=self.expand
).data,
status=status.HTTP_200_OK,
@@ -1668,15 +1669,9 @@ class IssueDraftViewSet(BaseViewSet):
def get_queryset(self):
return (
- Issue.objects.annotate(
- sub_issues_count=Issue.issue_objects.filter(
- parent=OuterRef("id")
- )
- .order_by()
- .annotate(count=Func(F("id"), function="Count"))
- .values("count")
+ Issue.objects.filter(
+ project_id=self.kwargs.get("project_id")
)
- .filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(is_draft=True)
.select_related("workspace", "project", "state", "parent")
@@ -1710,7 +1705,7 @@ def get_queryset(self):
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
- )
+ ).distinct()
@method_decorator(gzip_page)
def list(self, request, slug, project_id):
@@ -1832,7 +1827,10 @@ def create(self, request, slug, project_id):
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
- return Response(serializer.data, status=status.HTTP_201_CREATED)
+ issue = (
+ self.get_queryset().filter(pk=serializer.data["id"]).first()
+ )
+ return Response(IssueSerializer(issue).data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def partial_update(self, request, slug, project_id, pk):
@@ -1868,10 +1866,13 @@ def partial_update(self, request, slug, project_id, pk):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def retrieve(self, request, slug, project_id, pk=None):
- issue = Issue.objects.get(
- workspace__slug=slug, project_id=project_id, pk=pk, is_draft=True
+ issue = self.get_queryset().filter(pk=pk).first()
+ return Response(
+ IssueSerializer(
+ issue, fields=self.fields, expand=self.expand
+ ).data,
+ status=status.HTTP_200_OK,
)
- return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(
diff --git a/apiserver/plane/app/views/module.py b/apiserver/plane/app/views/module.py
index 1f055129a90..4792a1f7996 100644
--- a/apiserver/plane/app/views/module.py
+++ b/apiserver/plane/app/views/module.py
@@ -197,7 +197,7 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
total_issues=Count(
- "assignee_id",
+ "id",
filter=Q(
archived_at__isnull=True,
is_draft=False,
@@ -206,7 +206,7 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
completed_issues=Count(
- "assignee_id",
+ "id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
@@ -216,7 +216,7 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
pending_issues=Count(
- "assignee_id",
+ "id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
@@ -239,7 +239,7 @@ def retrieve(self, request, slug, project_id, pk):
.values("label_name", "color", "label_id")
.annotate(
total_issues=Count(
- "label_id",
+ "id",
filter=Q(
archived_at__isnull=True,
is_draft=False,
@@ -248,7 +248,7 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
completed_issues=Count(
- "label_id",
+ "id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
@@ -258,7 +258,7 @@ def retrieve(self, request, slug, project_id, pk):
)
.annotate(
pending_issues=Count(
- "label_id",
+ "id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
@@ -334,7 +334,7 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
def get_queryset(self):
return (
- Issue.objects.filter(
+ Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
issue_module__module_id=self.kwargs.get("module_id")
diff --git a/apiserver/plane/app/views/oauth.py b/apiserver/plane/app/views/oauth.py
index de90e433740..8152fb0eee4 100644
--- a/apiserver/plane/app/views/oauth.py
+++ b/apiserver/plane/app/views/oauth.py
@@ -296,7 +296,7 @@ def post(self, request):
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
- event_name="SIGN_IN",
+ event_name="Sign in",
medium=medium.upper(),
first_time=False,
)
@@ -427,7 +427,7 @@ def post(self, request):
email=email,
user_agent=request.META.get("HTTP_USER_AGENT"),
ip=request.META.get("REMOTE_ADDR"),
- event_name="SIGN_IN",
+ event_name="Sign up",
medium=medium.upper(),
first_time=True,
)
diff --git a/apiserver/plane/bgtasks/email_notification_task.py b/apiserver/plane/bgtasks/email_notification_task.py
index 713835033f6..9e9b348e197 100644
--- a/apiserver/plane/bgtasks/email_notification_task.py
+++ b/apiserver/plane/bgtasks/email_notification_task.py
@@ -1,5 +1,6 @@
-import json
from datetime import datetime
+from bs4 import BeautifulSoup
+
# Third party imports
from celery import shared_task
@@ -9,7 +10,6 @@
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
-from django.conf import settings
# Module imports
from plane.db.models import EmailNotificationLog, User, Issue
@@ -40,7 +40,7 @@ def stack_email_notification():
processed_notifications = []
# Loop through all the issues to create the emails
for receiver_id in receivers:
- # Notifcation triggered for the receiver
+ # Notification triggered for the receiver
receiver_notifications = [
notification
for notification in email_notifications
@@ -124,119 +124,153 @@ def create_payload(notification_data):
return data
+def process_mention(mention_component):
+ soup = BeautifulSoup(mention_component, 'html.parser')
+ mentions = soup.find_all('mention-component')
+ for mention in mentions:
+ user_id = mention['id']
+ user = User.objects.get(pk=user_id)
+ user_name = user.display_name
+ highlighted_name = f"@{user_name}"
+ mention.replace_with(highlighted_name)
+ return str(soup)
+
+def process_html_content(content):
+ processed_content_list = []
+ for html_content in content:
+ processed_content = process_mention(html_content)
+ processed_content_list.append(processed_content)
+ return processed_content_list
@shared_task
def send_email_notification(
issue_id, notification_data, receiver_id, email_notification_ids
):
- ri = redis_instance()
- base_api = (ri.get(str(issue_id)).decode())
- data = create_payload(notification_data=notification_data)
-
- # Get email configurations
- (
- EMAIL_HOST,
- EMAIL_HOST_USER,
- EMAIL_HOST_PASSWORD,
- EMAIL_PORT,
- EMAIL_USE_TLS,
- EMAIL_FROM,
- ) = get_email_configuration()
-
- receiver = User.objects.get(pk=receiver_id)
- issue = Issue.objects.get(pk=issue_id)
- template_data = []
- total_changes = 0
- comments = []
- actors_involved = []
- for actor_id, changes in data.items():
- actor = User.objects.get(pk=actor_id)
- total_changes = total_changes + len(changes)
- comment = changes.pop("comment", False)
- actors_involved.append(actor_id)
- if comment:
- comments.append(
- {
- "actor_comments": comment,
- "actor_detail": {
- "avatar_url": actor.avatar,
- "first_name": actor.first_name,
- "last_name": actor.last_name,
- },
- }
+ try:
+ ri = redis_instance()
+ base_api = (ri.get(str(issue_id)).decode())
+ data = create_payload(notification_data=notification_data)
+
+ # Get email configurations
+ (
+ EMAIL_HOST,
+ EMAIL_HOST_USER,
+ EMAIL_HOST_PASSWORD,
+ EMAIL_PORT,
+ EMAIL_USE_TLS,
+ EMAIL_FROM,
+ ) = get_email_configuration()
+
+ receiver = User.objects.get(pk=receiver_id)
+ issue = Issue.objects.get(pk=issue_id)
+ template_data = []
+ total_changes = 0
+ comments = []
+ actors_involved = []
+ for actor_id, changes in data.items():
+ actor = User.objects.get(pk=actor_id)
+ total_changes = total_changes + len(changes)
+ comment = changes.pop("comment", False)
+ mention = changes.pop("mention", False)
+ actors_involved.append(actor_id)
+ if comment:
+ comments.append(
+ {
+ "actor_comments": comment,
+ "actor_detail": {
+ "avatar_url": actor.avatar,
+ "first_name": actor.first_name,
+ "last_name": actor.last_name,
+ },
+ }
+ )
+ if mention:
+ mention["new_value"] = process_html_content(mention.get("new_value"))
+ mention["old_value"] = process_html_content(mention.get("old_value"))
+ comments.append(
+ {
+ "actor_comments": mention,
+ "actor_detail": {
+ "avatar_url": actor.avatar,
+ "first_name": actor.first_name,
+ "last_name": actor.last_name,
+ },
+ }
+ )
+ activity_time = changes.pop("activity_time")
+ # Parse the input string into a datetime object
+ formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%I:%M %p")
+
+ if changes:
+ template_data.append(
+ {
+ "actor_detail": {
+ "avatar_url": actor.avatar,
+ "first_name": actor.first_name,
+ "last_name": actor.last_name,
+ },
+ "changes": changes,
+ "issue_details": {
+ "name": issue.name,
+ "identifier": f"{issue.project.identifier}-{issue.sequence_id}",
+ },
+ "activity_time": str(formatted_time),
+ }
)
- activity_time = changes.pop("activity_time")
- # Parse the input string into a datetime object
- formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p")
-
- if changes:
- template_data.append(
- {
- "actor_detail": {
- "avatar_url": actor.avatar,
- "first_name": actor.first_name,
- "last_name": actor.last_name,
- },
- "changes": changes,
- "issue_details": {
- "name": issue.name,
- "identifier": f"{issue.project.identifier}-{issue.sequence_id}",
- },
- "activity_time": str(formatted_time),
- }
- )
- summary = "Updates were made to the issue by"
-
- # Send the mail
- subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
- context = {
- "data": template_data,
- "summary": summary,
- "actors_involved": len(set(actors_involved)),
- "issue": {
- "issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}",
- "name": issue.name,
- "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
- },
- "receiver": {
- "email": receiver.email,
- },
- "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
- "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/",
- "workspace":str(issue.project.workspace.slug),
- "project": str(issue.project.name),
- "user_preference": f"{base_api}/profile/preferences/email",
- "comments": comments,
- }
- html_content = render_to_string(
- "emails/notifications/issue-updates.html", context
- )
- text_content = strip_tags(html_content)
+ summary = "Updates were made to the issue by"
- try:
- connection = get_connection(
- host=EMAIL_HOST,
- port=int(EMAIL_PORT),
- username=EMAIL_HOST_USER,
- password=EMAIL_HOST_PASSWORD,
- use_tls=EMAIL_USE_TLS == "1",
+ # Send the mail
+ subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
+ context = {
+ "data": template_data,
+ "summary": summary,
+ "actors_involved": len(set(actors_involved)),
+ "issue": {
+ "issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}",
+ "name": issue.name,
+ "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
+ },
+ "receiver": {
+ "email": receiver.email,
+ },
+ "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
+ "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/",
+ "workspace":str(issue.project.workspace.slug),
+ "project": str(issue.project.name),
+ "user_preference": f"{base_api}/profile/preferences/email",
+ "comments": comments,
+ }
+ html_content = render_to_string(
+ "emails/notifications/issue-updates.html", context
)
+ text_content = strip_tags(html_content)
- msg = EmailMultiAlternatives(
- subject=subject,
- body=text_content,
- from_email=EMAIL_FROM,
- to=[receiver.email],
- connection=connection,
- )
- msg.attach_alternative(html_content, "text/html")
- msg.send()
+ try:
+ connection = get_connection(
+ host=EMAIL_HOST,
+ port=int(EMAIL_PORT),
+ username=EMAIL_HOST_USER,
+ password=EMAIL_HOST_PASSWORD,
+ use_tls=EMAIL_USE_TLS == "1",
+ )
- EmailNotificationLog.objects.filter(
- pk__in=email_notification_ids
- ).update(sent_at=timezone.now())
- return
- except Exception as e:
- print(e)
+ msg = EmailMultiAlternatives(
+ subject=subject,
+ body=text_content,
+ from_email=EMAIL_FROM,
+ to=[receiver.email],
+ connection=connection,
+ )
+ msg.attach_alternative(html_content, "text/html")
+ msg.send()
+
+ EmailNotificationLog.objects.filter(
+ pk__in=email_notification_ids
+ ).update(sent_at=timezone.now())
+ return
+ except Exception as e:
+ print(e)
+ return
+ except Issue.DoesNotExist:
return
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py
index b9f6bd41103..b86ab5e783e 100644
--- a/apiserver/plane/bgtasks/issue_activites_task.py
+++ b/apiserver/plane/bgtasks/issue_activites_task.py
@@ -353,13 +353,18 @@ def track_assignees(
issue_activities,
epoch,
):
- requested_assignees = set(
- [str(asg) for asg in requested_data.get("assignee_ids", [])]
+ requested_assignees = (
+ set([str(asg) for asg in requested_data.get("assignee_ids", [])])
+ if requested_data is not None
+ else set()
)
- current_assignees = set(
- [str(asg) for asg in current_instance.get("assignee_ids", [])]
+ current_assignees = (
+ set([str(asg) for asg in current_instance.get("assignee_ids", [])])
+ if current_instance is not None
+ else set()
)
+
added_assignees = requested_assignees - current_assignees
dropped_assginees = current_assignees - requested_assignees
@@ -547,6 +552,20 @@ def create_issue_activity(
epoch=epoch,
)
)
+ requested_data = (
+ json.loads(requested_data) if requested_data is not None else None
+ )
+ if requested_data is not None and requested_data.get("assignee_ids") is not None:
+ track_assignees(
+ requested_data,
+ current_instance,
+ issue_id,
+ project_id,
+ workspace_id,
+ actor_id,
+ issue_activities,
+ epoch,
+ )
def update_issue_activity(
diff --git a/apiserver/plane/bgtasks/notification_task.py b/apiserver/plane/bgtasks/notification_task.py
index 6cfbec72a96..0a843e4a63a 100644
--- a/apiserver/plane/bgtasks/notification_task.py
+++ b/apiserver/plane/bgtasks/notification_task.py
@@ -515,7 +515,7 @@ def notifications(
bulk_email_logs.append(
EmailNotificationLog(
triggered_by_id=actor_id,
- receiver_id=subscriber,
+ receiver_id=mention_id,
entity_identifier=issue_id,
entity_name="issue",
data={
@@ -552,6 +552,7 @@ def notifications(
"old_value": str(
issue_activity.get("old_value")
),
+ "activity_time": issue_activity.get("created_at"),
},
},
)
@@ -639,6 +640,7 @@ def notifications(
"old_value": str(
last_activity.old_value
),
+ "activity_time": issue_activity.get("created_at"),
},
},
)
@@ -695,6 +697,7 @@ def notifications(
"old_value"
)
),
+ "activity_time": issue_activity.get("created_at"),
},
},
)
diff --git a/apiserver/plane/bgtasks/webhook_task.py b/apiserver/plane/bgtasks/webhook_task.py
index 34bba0cf87a..605f48dd944 100644
--- a/apiserver/plane/bgtasks/webhook_task.py
+++ b/apiserver/plane/bgtasks/webhook_task.py
@@ -7,6 +7,9 @@
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
+from django.core.mail import EmailMultiAlternatives, get_connection
+from django.template.loader import render_to_string
+from django.utils.html import strip_tags
# Third party imports
from celery import shared_task
@@ -22,10 +25,10 @@
ModuleIssue,
CycleIssue,
IssueComment,
+ User,
)
from plane.api.serializers import (
ProjectSerializer,
- IssueSerializer,
CycleSerializer,
ModuleSerializer,
CycleIssueSerializer,
@@ -34,6 +37,9 @@
IssueExpandSerializer,
)
+# Module imports
+from plane.license.utils.instance_value import get_email_configuration
+
SERIALIZER_MAPPER = {
"project": ProjectSerializer,
"issue": IssueExpandSerializer,
@@ -72,7 +78,7 @@ def get_model_data(event, event_id, many=False):
max_retries=5,
retry_jitter=True,
)
-def webhook_task(self, webhook, slug, event, event_data, action):
+def webhook_task(self, webhook, slug, event, event_data, action, current_site):
try:
webhook = Webhook.objects.get(id=webhook, workspace__slug=slug)
@@ -151,7 +157,18 @@ def webhook_task(self, webhook, slug, event, event_data, action):
response_body=str(e),
retry_count=str(self.request.retries),
)
-
+ # Retry logic
+ if self.request.retries >= self.max_retries:
+ Webhook.objects.filter(pk=webhook.id).update(is_active=False)
+ if webhook:
+ # send email for the deactivation of the webhook
+ send_webhook_deactivation_email(
+ webhook_id=webhook.id,
+ receiver_id=webhook.created_by_id,
+ reason=str(e),
+ current_site=current_site,
+ )
+ return
raise requests.RequestException()
except Exception as e:
@@ -162,7 +179,7 @@ def webhook_task(self, webhook, slug, event, event_data, action):
@shared_task()
-def send_webhook(event, payload, kw, action, slug, bulk):
+def send_webhook(event, payload, kw, action, slug, bulk, current_site):
try:
webhooks = Webhook.objects.filter(workspace__slug=slug, is_active=True)
@@ -216,6 +233,7 @@ def send_webhook(event, payload, kw, action, slug, bulk):
event=event,
event_data=data,
action=action,
+ current_site=current_site,
)
except Exception as e:
@@ -223,3 +241,56 @@ def send_webhook(event, payload, kw, action, slug, bulk):
print(e)
capture_exception(e)
return
+
+
+@shared_task
+def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reason):
+ # Get email configurations
+ (
+ EMAIL_HOST,
+ EMAIL_HOST_USER,
+ EMAIL_HOST_PASSWORD,
+ EMAIL_PORT,
+ EMAIL_USE_TLS,
+ EMAIL_FROM,
+ ) = get_email_configuration()
+
+ receiver = User.objects.get(pk=receiver_id)
+ webhook = Webhook.objects.get(pk=webhook_id)
+ subject="Webhook Deactivated"
+ message=f"Webhook {webhook.url} has been deactivated due to failed requests."
+
+ # Send the mail
+ context = {
+ "email": receiver.email,
+ "message": message,
+ "webhook_url":f"{current_site}/{str(webhook.workspace.slug)}/settings/webhooks/{str(webhook.id)}",
+ }
+ html_content = render_to_string(
+ "emails/notifications/webhook-deactivate.html", context
+ )
+ text_content = strip_tags(html_content)
+
+ try:
+ connection = get_connection(
+ host=EMAIL_HOST,
+ port=int(EMAIL_PORT),
+ username=EMAIL_HOST_USER,
+ password=EMAIL_HOST_PASSWORD,
+ use_tls=EMAIL_USE_TLS == "1",
+ )
+
+ msg = EmailMultiAlternatives(
+ subject=subject,
+ body=text_content,
+ from_email=EMAIL_FROM,
+ to=[receiver.email],
+ connection=connection,
+ )
+ msg.attach_alternative(html_content, "text/html")
+ msg.send()
+
+ return
+ except Exception as e:
+ print(e)
+ return
diff --git a/apiserver/plane/db/migrations/0059_auto_20240208_0957.py b/apiserver/plane/db/migrations/0059_auto_20240208_0957.py
new file mode 100644
index 00000000000..c4c43fa4bf6
--- /dev/null
+++ b/apiserver/plane/db/migrations/0059_auto_20240208_0957.py
@@ -0,0 +1,33 @@
+# Generated by Django 4.2.7 on 2024-02-08 09:57
+
+from django.db import migrations
+
+
+def widgets_filter_change(apps, schema_editor):
+ Widget = apps.get_model("db", "Widget")
+ widgets_to_update = []
+
+ # Define the filter dictionaries for each widget key
+ filters_mapping = {
+ "assigned_issues": {"duration": "none", "tab": "pending"},
+ "created_issues": {"duration": "none", "tab": "pending"},
+ "issues_by_state_groups": {"duration": "none"},
+ "issues_by_priority": {"duration": "none"},
+ }
+
+ # Iterate over widgets and update filters if applicable
+ for widget in Widget.objects.all():
+ if widget.key in filters_mapping:
+ widget.filters = filters_mapping[widget.key]
+ widgets_to_update.append(widget)
+
+ # Bulk update the widgets
+ Widget.objects.bulk_update(widgets_to_update, ["filters"], batch_size=10)
+
+class Migration(migrations.Migration):
+ dependencies = [
+ ('db', '0058_alter_moduleissue_issue_and_more'),
+ ]
+ operations = [
+ migrations.RunPython(widgets_filter_change)
+ ]
diff --git a/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py b/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py
new file mode 100644
index 00000000000..074e20a16b8
--- /dev/null
+++ b/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py
@@ -0,0 +1,18 @@
+# Generated by Django 4.2.7 on 2024-02-08 09:18
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0059_auto_20240208_0957'),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name='cycle',
+ name='progress_snapshot',
+ field=models.JSONField(default=dict),
+ ),
+ ]
diff --git a/apiserver/plane/db/models/cycle.py b/apiserver/plane/db/models/cycle.py
index 5251c68ec9f..d802dbc1e09 100644
--- a/apiserver/plane/db/models/cycle.py
+++ b/apiserver/plane/db/models/cycle.py
@@ -68,6 +68,7 @@ class Cycle(ProjectBaseModel):
sort_order = models.FloatField(default=65535)
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
+ progress_snapshot = models.JSONField(default=dict)
class Meta:
verbose_name = "Cycle"
diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py
index 6f8a82e5672..f254a3cb734 100644
--- a/apiserver/plane/db/models/user.py
+++ b/apiserver/plane/db/models/user.py
@@ -172,4 +172,9 @@ def create_user_notification(sender, instance, created, **kwargs):
from plane.db.models import UserNotificationPreference
UserNotificationPreference.objects.create(
user=instance,
+ property_change=False,
+ state_change=False,
+ comment=False,
+ mention=False,
+ issue_completed=False,
)
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index 444248382ff..f032092504c 100644
--- a/apiserver/plane/settings/common.py
+++ b/apiserver/plane/settings/common.py
@@ -282,10 +282,8 @@
redis_url = os.environ.get("REDIS_URL")
broker_url = f"{redis_url}?ssl_cert_reqs={ssl.CERT_NONE.name}&ssl_ca_certs={certifi.where()}"
CELERY_BROKER_URL = broker_url
- CELERY_RESULT_BACKEND = broker_url
else:
CELERY_BROKER_URL = REDIS_URL
- CELERY_RESULT_BACKEND = REDIS_URL
CELERY_IMPORTS = (
"plane.bgtasks.issue_automation_task",
diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt
index 0e7a18fa86b..194bf8d903a 100644
--- a/apiserver/requirements/base.txt
+++ b/apiserver/requirements/base.txt
@@ -1,6 +1,6 @@
# base requirements
-Django==4.2.7
+Django==4.2.10
psycopg==3.1.12
djangorestframework==3.14.0
redis==4.6.0
@@ -30,7 +30,7 @@ openpyxl==3.1.2
beautifulsoup4==4.12.2
dj-database-url==2.1.0
posthog==3.0.2
-cryptography==41.0.6
+cryptography==42.0.0
lxml==4.9.3
boto3==1.28.40
diff --git a/apiserver/templates/emails/notifications/issue-updates.html b/apiserver/templates/emails/notifications/issue-updates.html
index fa50631c557..3c561f37ac2 100644
--- a/apiserver/templates/emails/notifications/issue-updates.html
+++ b/apiserver/templates/emails/notifications/issue-updates.html
@@ -66,7 +66,7 @@
style="margin-left: 30px; margin-bottom: 20px; margin-top: 20px"
>
- {% if actors_involved == 1 %}
-
- {{summary}} - - {{ data.0.actor_detail.first_name}} - {{data.0.actor_detail.last_name}} - . -
- {% else %} -- {{summary}} - - {{ data.0.actor_detail.first_name}} - {{data.0.actor_detail.last_name }} - and others. -
- {% endif %} - - + {% if actors_involved == 1 %} ++ {{summary}} + + {% if data|length > 0 %} + {{ data.0.actor_detail.first_name}} + {{data.0.actor_detail.last_name}} + {% else %} + {{ comments.0.actor_detail.first_name}} + {{comments.0.actor_detail.last_name}} + {% endif %} + . +
+ {% else %} ++ {{summary}} + + {% if data|length > 0 %} + {{ data.0.actor_detail.first_name}} + {{data.0.actor_detail.last_name}} + {% else %} + {{ comments.0.actor_detail.first_name}} + {{comments.0.actor_detail.last_name}} + {% endif %} + and others. +
+ {% endif %} + + + +
+
|
+