diff --git a/.deepsource.toml b/.deepsource.toml
new file mode 100644
index 00000000000..2b40af672d7
--- /dev/null
+++ b/.deepsource.toml
@@ -0,0 +1,23 @@
+version = 1
+
+exclude_patterns = [
+ "bin/**",
+ "**/node_modules/",
+ "**/*.min.js"
+]
+
+[[analyzers]]
+name = "shell"
+
+[[analyzers]]
+name = "javascript"
+
+ [analyzers.meta]
+ plugins = ["react"]
+ environment = ["nodejs"]
+
+[[analyzers]]
+name = "python"
+
+ [analyzers.meta]
+ runtime_version = "3.x.x"
\ No newline at end of file
diff --git a/.dockerignore b/.dockerignore
index 45ff21c4f9d..6d52ca7c809 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -2,5 +2,16 @@
*.pyc
.env
venv
-node_modules
-npm-debug.log
\ No newline at end of file
+node_modules/
+**/node_modules/
+npm-debug.log
+.next/
+**/.next/
+.turbo/
+**/.turbo/
+build/
+**/build/
+out/
+**/out/
+dist/
+**/dist/
\ No newline at end of file
diff --git a/.env.example b/.env.example
index 082aa753b80..90070de1986 100644
--- a/.env.example
+++ b/.env.example
@@ -21,15 +21,15 @@ AWS_S3_BUCKET_NAME="uploads"
FILE_SIZE_LIMIT=5242880
# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
# Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
+
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
-
diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
new file mode 100644
index 00000000000..d25154b15bb
--- /dev/null
+++ b/.github/workflows/build-branch.yml
@@ -0,0 +1,228 @@
+name: Branch Build
+
+on:
+ pull_request:
+ types:
+ - closed
+ branches:
+ - master
+ - preview
+ - qa
+ - develop
+ release:
+ types: [released, prereleased]
+
+env:
+ TARGET_BRANCH: ${{ github.event.pull_request.base.ref || github.event.release.target_commitish }}
+
+jobs:
+ branch_build_setup:
+ if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) || github.event_name == 'release' }}
+ name: Build-Push Web/Space/API/Proxy Docker Image
+ runs-on: ubuntu-20.04
+
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3.3.0
+
+ - name: Uploading Proxy Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: proxy-src-code
+ path: ./nginx
+ - name: Uploading Backend Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: backend-src-code
+ path: ./apiserver
+ - name: Uploading Web Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: web-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./space
+ - name: Uploading Space Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: space-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./web
+ outputs:
+ gh_branch_name: ${{ env.TARGET_BRANCH }}
+
+ branch_build_push_frontend:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ FRONTEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Frontend Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable
+ else
+ TAG=${{ env.FRONTEND_TAG }}
+ fi
+ echo "FRONTEND_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Web Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: web-src-code
+
+ - name: Build and Push Frontend to Docker Container Registry
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./web/Dockerfile.web
+ platforms: linux/amd64
+ tags: ${{ env.FRONTEND_TAG }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_space:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ SPACE_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Space Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable
+ else
+ TAG=${{ env.SPACE_TAG }}
+ fi
+ echo "SPACE_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Space Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: space-src-code
+
+ - name: Build and Push Space to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./space/Dockerfile.space
+ platforms: linux/amd64
+ tags: ${{ env.SPACE_TAG }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_backend:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ BACKEND_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Backend Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable
+ else
+ TAG=${{ env.BACKEND_TAG }}
+ fi
+ echo "BACKEND_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Backend Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: backend-src-code
+
+ - name: Build and Push Backend to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile.api
+ platforms: linux/amd64
+ push: true
+ tags: ${{ env.BACKEND_TAG }}
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_proxy:
+ runs-on: ubuntu-20.04
+ needs: [branch_build_setup]
+ env:
+ PROXY_TAG: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ steps:
+ - name: Set Proxy Docker Tag
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
+ elif [ "${{ needs.branch_build_setup.outputs.gh_branch_name }}" == "master" ]; then
+ TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable
+ else
+ TAG=${{ env.PROXY_TAG }}
+ fi
+ echo "PROXY_TAG=${TAG}" >> $GITHUB_ENV
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Downloading Proxy Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: proxy-src-code
+
+ - name: Build and Push Plane-Proxy to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile
+ platforms: linux/amd64
+ tags: ${{ env.PROXY_TAG }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml
index 6dc7ae1e5e5..c74975f48ef 100644
--- a/.github/workflows/build-test-pull-request.yml
+++ b/.github/workflows/build-test-pull-request.yml
@@ -36,15 +36,13 @@ jobs:
- name: Build Plane's Main App
if: steps.changed-files.outputs.web_any_changed == 'true'
run: |
- cd web
yarn
- yarn build
+ yarn build --filter=web
- name: Build Plane's Deploy App
if: steps.changed-files.outputs.deploy_any_changed == 'true'
run: |
- cd space
yarn
- yarn build
+ yarn build --filter=space
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 00000000000..29fbde45365
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,65 @@
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ 'develop', 'hot-fix', 'stage-release' ]
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ 'develop' ]
+ schedule:
+ - cron: '53 19 * * 5'
+
+jobs:
+ analyze:
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'python', 'javascript' ]
+ # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+ # Use only 'java' to analyze code written in Java, Kotlin or both
+ # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
+ # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
+
+ # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+ # queries: security-extended,security-and-quality
+
+
+ # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v2
+
+ # βΉοΈ Command-line programs to run using the OS shell.
+ # π See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+
+ # If the Autobuild fails above, remove it and uncomment the following three lines.
+ # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
+
+ # - run: |
+ # echo "Run, Build Application using script"
+ # ./location_of_script_within_repo/buildscript.sh
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v2
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml
index 28e47a0d66b..c8e27f32216 100644
--- a/.github/workflows/create-sync-pr.yml
+++ b/.github/workflows/create-sync-pr.yml
@@ -2,6 +2,8 @@ name: Create PR in Plane EE Repository to sync the changes
on:
pull_request:
+ branches:
+ - master
types:
- closed
diff --git a/.github/workflows/update-docker-images.yml b/.github/workflows/update-docker-images.yml
deleted file mode 100644
index 67ae97e758c..00000000000
--- a/.github/workflows/update-docker-images.yml
+++ /dev/null
@@ -1,107 +0,0 @@
-name: Update Docker Images for Plane on Release
-
-on:
- release:
- types: [released, prereleased]
-
-jobs:
- build_push_backend:
- name: Build and Push Api Server Docker Image
- runs-on: ubuntu-20.04
-
- steps:
- - name: Check out the repo
- uses: actions/checkout@v3.3.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2.5.0
-
- - name: Login to Docker Hub
- uses: docker/login-action@v2.1.0
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaFrontend
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend
- tags: |
- type=ref,event=tag
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaBackend
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend
- tags: |
- type=ref,event=tag
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaSpace
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space
- tags: |
- type=ref,event=tag
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaProxy
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy
- tags: |
- type=ref,event=tag
-
- - name: Build and Push Frontend to Docker Container Registry
- uses: docker/build-push-action@v4.0.0
- with:
- context: .
- file: ./web/Dockerfile.web
- platforms: linux/amd64
- tags: ${{ steps.metaFrontend.outputs.tags }}
- push: true
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and Push Backend to Docker Hub
- uses: docker/build-push-action@v4.0.0
- with:
- context: ./apiserver
- file: ./apiserver/Dockerfile.api
- platforms: linux/amd64
- push: true
- tags: ${{ steps.metaBackend.outputs.tags }}
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and Push Plane-Deploy to Docker Hub
- uses: docker/build-push-action@v4.0.0
- with:
- context: .
- file: ./space/Dockerfile.space
- platforms: linux/amd64
- push: true
- tags: ${{ steps.metaSpace.outputs.tags }}
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and Push Plane-Proxy to Docker Hub
- uses: docker/build-push-action@v4.0.0
- with:
- context: ./nginx
- file: ./nginx/Dockerfile
- platforms: linux/amd64
- push: true
- tags: ${{ steps.metaProxy.outputs.tags }}
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.gitignore b/.gitignore
index 1e99e102ad5..0b655bd0e75 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,6 +16,8 @@ node_modules
# Production
/build
+dist/
+out/
# Misc
.DS_Store
@@ -73,3 +75,8 @@ pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
+.secrets
+tmp/
+## packages
+dist
+.temp/
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index cd74b612133..9fa847b6e76 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
-hello@plane.so.
+squawk@plane.so.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index b25a791d080..73d69fb2d5b 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -8,8 +8,8 @@ Before submitting a new issue, please search the [issues](https://github.com/mak
While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information without asking questions back & forth with additional questions like:
-- 3rd-party libraries being used and their versions
-- a use-case that fails
+- 3rd-party libraries being used and their versions
+- a use-case that fails
Without said minimal reproduction, we won't be able to investigate all [issues](https://github.com/makeplane/plane/issues), and the issue might not be resolved.
@@ -19,10 +19,10 @@ You can open a new issue with this [issue form](https://github.com/makeplane/pla
### Requirements
-- Node.js version v16.18.0
-- Python version 3.8+
-- Postgres version v14
-- Redis version v6.2.7
+- Node.js version v16.18.0
+- Python version 3.8+
+- Postgres version v14
+- Redis version v6.2.7
### Setup the project
@@ -81,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitt
To ensure consistency throughout the source code, please keep these rules in mind as you are working:
-- All features or bug fixes must be tested by one or more specs (unit-tests).
-- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
+- All features or bug fixes must be tested by one or more specs (unit-tests).
+- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
## Need help? Questions and suggestions
@@ -90,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome. We can also be reached in
## Ways to contribute
-- Try Plane Cloud and the self hosting platform and give feedback
-- Add new integrations
-- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
-- Share your thoughts and suggestions with us
-- Help create tutorials and blog posts
-- Request a feature by submitting a proposal
-- Report a bug
-- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
+- Try Plane Cloud and the self hosting platform and give feedback
+- Add new integrations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
diff --git a/Dockerfile b/Dockerfile
index 388c5a4ef99..0f4ecfd364f 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -43,8 +43,6 @@ FROM python:3.11.1-alpine3.17 AS backend
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
-ENV DJANGO_SETTINGS_MODULE plane.settings.production
-ENV DOCKERIZED 1
WORKDIR /code
@@ -81,7 +79,6 @@ COPY apiserver/manage.py manage.py
COPY apiserver/plane plane/
COPY apiserver/templates templates/
-COPY apiserver/gunicorn.config.py ./
RUN apk --no-cache add "bash~=5.2"
COPY apiserver/bin ./bin/
diff --git a/ENV_SETUP.md b/ENV_SETUP.md
new file mode 100644
index 00000000000..bfc30019624
--- /dev/null
+++ b/ENV_SETUP.md
@@ -0,0 +1,131 @@
+# Environment Variables
+
+
+Environment variables are distributed in various files. Please refer to them carefully.
+
+## {PROJECT_FOLDER}/.env
+
+This file is available in the project root folder.
+
+```
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# set to 1 if using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+```
+
+
+
+## {PROJECT_FOLDER}/web/.env.example
+
+
+
+```
+# Public boards deploy URL
+NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
+```
+
+
+## {PROJECT_FOLDER}/apiserver/.env
+
+
+
+```
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# Settings related to Docker
+DOCKERIZED=1 # deprecated
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# set to 1 if using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+# Email Redirection URL
+WEB_URL="http://localhost"
+```
+
+## Updates
+
+- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
+- The naming convention for containers and images has been updated.
+- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
+- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
+- The image name for Plane deployment has been changed to plane-space.
diff --git a/README.md b/README.md
index f9d969d72c9..3f74043053a 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
Plane
-Open-source, self-hosted project planning tool
+Flexible, extensible open-source project management
@@ -39,33 +39,31 @@ Meet [Plane](https://plane.so). An open-source software development tool to mana
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
-## β‘οΈ Quick start with Docker Compose
+## β‘οΈ Contributors Quick Start
-### Docker Compose Setup
+### Prerequisite
-- Clone the repository
+Development system must have docker engine installed and running.
-```bash
-git clone https://github.com/makeplane/plane
-cd plane
-chmod +x setup.sh
-```
+### Steps
-- Run setup.sh
+Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute
-```bash
-./setup.sh
-```
+1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
+1. Switch to the code folder `cd plane`
+1. Create your feature or fix branch you plan to work on using `git checkout -b `
+1. Open terminal and run `./setup.sh`
+1. Open the code on VSCode or similar equivalent IDE
+1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to know about various environment variables used in system
+1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
-> If running in a cloud env replace localhost with public facing IP address of the VM
+You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload)
-- Run Docker compose up
+That's it!
-```bash
-docker compose up -d
-```
+## π Self Hosting
-You can use the default email and password for your first login `captain@plane.so` and `password123`.
+For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page
## π Features
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 8193b5e7716..37178b39809 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -1,10 +1,11 @@
# Backend
# Debug value for api server use it as 0 for production use
DEBUG=0
-DJANGO_SETTINGS_MODULE="plane.settings.production"
+CORS_ALLOWED_ORIGINS=""
# Error logs
SENTRY_DSN=""
+SENTRY_ENVIRONMENT="development"
# Database Settings
PGUSER="plane"
@@ -13,20 +14,16 @@ PGHOST="plane-db"
PGDATABASE="plane"
DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+# Oauth variables
+GOOGLE_CLIENT_ID=""
+GITHUB_CLIENT_ID=""
+GITHUB_CLIENT_SECRET=""
+
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane "
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
-
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
@@ -38,29 +35,26 @@ AWS_S3_BUCKET_NAME="uploads"
FILE_SIZE_LIMIT=5242880
# GPT settings
-OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
-OPENAI_API_KEY="sk-" # add your openai key here
-GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
# Github
GITHUB_CLIENT_SECRET="" # For fetching release notes
# Settings related to Docker
-DOCKERIZED=1
+DOCKERIZED=1 # deprecated
+
# set to 1 If using the pre-configured minio setup
USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
# SignUps
ENABLE_SIGNUP="1"
-
# Enable Email/Password Signup
ENABLE_EMAIL_PASSWORD="1"
@@ -70,3 +64,6 @@ ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"
+# Gunicorn Workers
+GUNICORN_WORKERS=2
+
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 15c3f53a92b..0e4e0ac501b 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -43,8 +43,7 @@ USER captain
COPY manage.py manage.py
COPY plane plane/
COPY templates templates/
-
-COPY gunicorn.config.py ./
+COPY package.json package.json
USER root
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/
diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev
new file mode 100644
index 00000000000..d5202073552
--- /dev/null
+++ b/apiserver/Dockerfile.dev
@@ -0,0 +1,53 @@
+FROM python:3.11.1-alpine3.17 AS backend
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+
+RUN apk --no-cache add \
+ "bash~=5.2" \
+ "libpq~=15" \
+ "libxslt~=1.1" \
+ "nodejs-current~=19" \
+ "xmlsec~=1.2" \
+ "libffi-dev" \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
+ "git~=2" \
+ "make~=4.3" \
+ "postgresql13-dev~=13" \
+ "libc-dev" \
+ "linux-headers"
+
+WORKDIR /code
+
+COPY requirements.txt ./requirements.txt
+ADD requirements ./requirements
+
+# Install the local development settings
+RUN pip install -r requirements/local.txt --compile --no-cache-dir
+
+RUN addgroup -S plane && \
+ adduser -S captain -G plane
+
+RUN chown captain.plane /code
+
+USER captain
+
+# Add in Django deps and generate Django's static files
+
+USER root
+
+# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
+RUN chmod -R 777 /code
+
+USER captain
+
+# Expose container port and run entry point script
+EXPOSE 8000
+
+# CMD [ "./bin/takeoff" ]
+
diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff
index dc25a14e2d1..0ec2e495ca8 100755
--- a/apiserver/bin/takeoff
+++ b/apiserver/bin/takeoff
@@ -3,7 +3,28 @@ set -e
python manage.py wait_for_db
python manage.py migrate
-# Create a Default User
-python bin/user_script.py
+# Compute a machine signature used to register this instance
+
+
+# Collect system information
+HOSTNAME=$(hostname)
+MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
+CPU_INFO=$(cat /proc/cpuinfo)
+MEMORY_INFO=$(free -h)
+DISK_INFO=$(df -h)
+
+# Concatenate information and compute SHA-256 hash
+SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
+
+# Export the variables
+export MACHINE_SIGNATURE=$SIGNATURE
+
+# Register instance
+python manage.py register_instance $MACHINE_SIGNATURE
+# Load the configuration variable
+python manage.py configure_instance
+
+# Create the default bucket
+python manage.py create_bucket
-exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
+exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:${PORT:-8000} --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py
deleted file mode 100644
index e115b20b8c0..00000000000
--- a/apiserver/bin/user_script.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import os, sys, random, string
-import uuid
-
-sys.path.append("/code")
-
-os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
-import django
-
-django.setup()
-
-from plane.db.models import User
-
-
-def populate():
- default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
- default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
-
- if not User.objects.filter(email=default_email).exists():
- user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
- user.set_password(default_password)
- user.save()
- print(f"User created with an email: {default_email}")
- else:
- print(f"User already exists with the default email: {default_email}")
-
-
-if __name__ == "__main__":
- populate()
diff --git a/apiserver/file.txt b/apiserver/file.txt
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/apiserver/gunicorn.config.py b/apiserver/gunicorn.config.py
deleted file mode 100644
index 67205b5ec94..00000000000
--- a/apiserver/gunicorn.config.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from psycogreen.gevent import patch_psycopg
-
-
-def post_fork(server, worker):
- patch_psycopg()
- worker.log.info("Made Psycopg2 Green")
\ No newline at end of file
diff --git a/apiserver/package.json b/apiserver/package.json
new file mode 100644
index 00000000000..a317b477680
--- /dev/null
+++ b/apiserver/package.json
@@ -0,0 +1,4 @@
+{
+ "name": "plane-api",
+ "version": "0.14.0"
+}
diff --git a/apiserver/plane/api/apps.py b/apiserver/plane/api/apps.py
index 6ba36e7e558..292ad934476 100644
--- a/apiserver/plane/api/apps.py
+++ b/apiserver/plane/api/apps.py
@@ -2,4 +2,4 @@
class ApiConfig(AppConfig):
- name = "plane.api"
+ name = "plane.api"
\ No newline at end of file
diff --git a/apiserver/plane/api/middleware/__init__.py b/apiserver/plane/api/middleware/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/apiserver/plane/api/middleware/api_authentication.py b/apiserver/plane/api/middleware/api_authentication.py
new file mode 100644
index 00000000000..1b2c033182d
--- /dev/null
+++ b/apiserver/plane/api/middleware/api_authentication.py
@@ -0,0 +1,47 @@
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+
+# Third party imports
+from rest_framework import authentication
+from rest_framework.exceptions import AuthenticationFailed
+
+# Module imports
+from plane.db.models import APIToken
+
+
+class APIKeyAuthentication(authentication.BaseAuthentication):
+ """
+ Authentication with an API Key
+ """
+
+ www_authenticate_realm = "api"
+ media_type = "application/json"
+ auth_header_name = "X-Api-Key"
+
+ def get_api_token(self, request):
+ return request.headers.get(self.auth_header_name)
+
+ def validate_api_token(self, token):
+ try:
+ api_token = APIToken.objects.get(
+ Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+ token=token,
+ is_active=True,
+ )
+ except APIToken.DoesNotExist:
+ raise AuthenticationFailed("Given API token is not valid")
+
+ # save api token last used
+ api_token.last_used = timezone.now()
+ api_token.save(update_fields=["last_used"])
+ return (api_token.user, api_token.token)
+
+ def authenticate(self, request):
+ token = self.get_api_token(request=request)
+ if not token:
+ return None
+
+ # Validate the API token
+ user, token = self.validate_api_token(token)
+ return user, token
\ No newline at end of file
diff --git a/apiserver/plane/api/permissions/__init__.py b/apiserver/plane/api/permissions/__init__.py
deleted file mode 100644
index 8b15a93733f..00000000000
--- a/apiserver/plane/api/permissions/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .workspace import WorkSpaceBasePermission, WorkSpaceAdminPermission, WorkspaceEntityPermission, WorkspaceViewerPermission
-from .project import ProjectBasePermission, ProjectEntityPermission, ProjectMemberPermission, ProjectLitePermission
diff --git a/apiserver/plane/api/permissions/project.py b/apiserver/plane/api/permissions/project.py
deleted file mode 100644
index e4e3e0f9bc3..00000000000
--- a/apiserver/plane/api/permissions/project.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Third Party imports
-from rest_framework.permissions import BasePermission, SAFE_METHODS
-
-# Module import
-from plane.db.models import WorkspaceMember, ProjectMember
-
-# Permission Mappings
-Admin = 20
-Member = 15
-Viewer = 10
-Guest = 5
-
-
-class ProjectBasePermission(BasePermission):
- def has_permission(self, request, view):
-
- if request.user.is_anonymous:
- return False
-
- ## Safe Methods -> Handle the filtering logic in queryset
- if request.method in SAFE_METHODS:
- return WorkspaceMember.objects.filter(
- workspace__slug=view.workspace_slug, member=request.user
- ).exists()
-
- ## Only workspace owners or admins can create the projects
- if request.method == "POST":
- return WorkspaceMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- role__in=[Admin, Member],
- ).exists()
-
- ## Only Project Admins can update project attributes
- return ProjectMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- role=Admin,
- project_id=view.project_id,
- ).exists()
-
-
-class ProjectMemberPermission(BasePermission):
- def has_permission(self, request, view):
-
- if request.user.is_anonymous:
- return False
-
- ## Safe Methods -> Handle the filtering logic in queryset
- if request.method in SAFE_METHODS:
- return ProjectMember.objects.filter(
- workspace__slug=view.workspace_slug, member=request.user
- ).exists()
- ## Only workspace owners or admins can create the projects
- if request.method == "POST":
- return WorkspaceMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- role__in=[Admin, Member],
- ).exists()
-
- ## Only Project Admins can update project attributes
- return ProjectMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- role__in=[Admin, Member],
- project_id=view.project_id,
- ).exists()
-
-
-class ProjectEntityPermission(BasePermission):
- def has_permission(self, request, view):
-
- if request.user.is_anonymous:
- return False
-
- ## Safe Methods -> Handle the filtering logic in queryset
- if request.method in SAFE_METHODS:
- return ProjectMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- project_id=view.project_id,
- ).exists()
-
- ## Only project members or admins can create and edit the project attributes
- return ProjectMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- role__in=[Admin, Member],
- project_id=view.project_id,
- ).exists()
-
-
-class ProjectLitePermission(BasePermission):
-
- def has_permission(self, request, view):
- if request.user.is_anonymous:
- return False
-
- return ProjectMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- project_id=view.project_id,
- ).exists()
\ No newline at end of file
diff --git a/apiserver/plane/api/permissions/workspace.py b/apiserver/plane/api/permissions/workspace.py
deleted file mode 100644
index 66e8366146c..00000000000
--- a/apiserver/plane/api/permissions/workspace.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Third Party imports
-from rest_framework.permissions import BasePermission, SAFE_METHODS
-
-# Module imports
-from plane.db.models import WorkspaceMember
-
-
-# Permission Mappings
-Owner = 20
-Admin = 15
-Member = 10
-Guest = 5
-
-
-# TODO: Move the below logic to python match - python v3.10
-class WorkSpaceBasePermission(BasePermission):
- def has_permission(self, request, view):
- # allow anyone to create a workspace
- if request.user.is_anonymous:
- return False
-
- if request.method == "POST":
- return True
-
- ## Safe Methods
- if request.method in SAFE_METHODS:
- return True
-
- # allow only admins and owners to update the workspace settings
- if request.method in ["PUT", "PATCH"]:
- return WorkspaceMember.objects.filter(
- member=request.user,
- workspace__slug=view.workspace_slug,
- role__in=[Owner, Admin],
- ).exists()
-
- # allow only owner to delete the workspace
- if request.method == "DELETE":
- return WorkspaceMember.objects.filter(
- member=request.user, workspace__slug=view.workspace_slug, role=Owner
- ).exists()
-
-
-class WorkSpaceAdminPermission(BasePermission):
- def has_permission(self, request, view):
- if request.user.is_anonymous:
- return False
-
- return WorkspaceMember.objects.filter(
- member=request.user,
- workspace__slug=view.workspace_slug,
- role__in=[Owner, Admin],
- ).exists()
-
-
-class WorkspaceEntityPermission(BasePermission):
- def has_permission(self, request, view):
- if request.user.is_anonymous:
- return False
-
- ## Safe Methods -> Handle the filtering logic in queryset
- if request.method in SAFE_METHODS:
- return WorkspaceMember.objects.filter(
- workspace__slug=view.workspace_slug,
- member=request.user,
- ).exists()
-
- return WorkspaceMember.objects.filter(
- member=request.user,
- workspace__slug=view.workspace_slug,
- role__in=[Owner, Admin],
- ).exists()
-
-
-class WorkspaceViewerPermission(BasePermission):
- def has_permission(self, request, view):
- if request.user.is_anonymous:
- return False
-
- return WorkspaceMember.objects.filter(
- member=request.user, workspace__slug=view.workspace_slug, role__gte=10
- ).exists()
diff --git a/apiserver/plane/api/rate_limit.py b/apiserver/plane/api/rate_limit.py
new file mode 100644
index 00000000000..f91e2d65d84
--- /dev/null
+++ b/apiserver/plane/api/rate_limit.py
@@ -0,0 +1,41 @@
+from rest_framework.throttling import SimpleRateThrottle
+
+class ApiKeyRateThrottle(SimpleRateThrottle):
+ scope = 'api_key'
+ rate = '60/minute'
+
+ def get_cache_key(self, request, view):
+ # Retrieve the API key from the request header
+ api_key = request.headers.get('X-Api-Key')
+ if not api_key:
+ return None # Allow the request if there's no API key
+
+ # Use the API key as part of the cache key
+ return f'{self.scope}:{api_key}'
+
+ def allow_request(self, request, view):
+ allowed = super().allow_request(request, view)
+
+ if allowed:
+ now = self.timer()
+ # Calculate the remaining limit and reset time
+ history = self.cache.get(self.key, [])
+
+ # Remove old histories
+ while history and history[-1] <= now - self.duration:
+ history.pop()
+
+ # Calculate the requests
+ num_requests = len(history)
+
+ # Check available requests
+ available = self.num_requests - num_requests
+
+ # Unix timestamp for when the rate limit will reset
+ reset_time = int(now + self.duration)
+
+ # Add headers
+ request.META['X-RateLimit-Remaining'] = max(0, available)
+ request.META['X-RateLimit-Reset'] = reset_time
+
+ return allowed
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py
index dbf7ca0496a..1fd1bce7816 100644
--- a/apiserver/plane/api/serializers/__init__.py
+++ b/apiserver/plane/api/serializers/__init__.py
@@ -1,87 +1,17 @@
-from .base import BaseSerializer
-from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
-from .workspace import (
- WorkSpaceSerializer,
- WorkSpaceMemberSerializer,
- TeamSerializer,
- WorkSpaceMemberInviteSerializer,
- WorkspaceLiteSerializer,
- WorkspaceThemeSerializer,
- WorkspaceMemberAdminSerializer,
-)
-from .project import (
- ProjectSerializer,
- ProjectDetailSerializer,
- ProjectMemberSerializer,
- ProjectMemberInviteSerializer,
- ProjectIdentifierSerializer,
- ProjectFavoriteSerializer,
- ProjectLiteSerializer,
- ProjectMemberLiteSerializer,
- ProjectDeployBoardSerializer,
- ProjectMemberAdminSerializer,
- ProjectPublicMemberSerializer
-)
-from .state import StateSerializer, StateLiteSerializer
-from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
-from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
-from .asset import FileAssetSerializer
+from .user import UserLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from .project import ProjectSerializer, ProjectLiteSerializer
from .issue import (
- IssueCreateSerializer,
- IssueActivitySerializer,
- IssueCommentSerializer,
- IssuePropertySerializer,
- IssueAssigneeSerializer,
- LabelSerializer,
IssueSerializer,
- IssueFlatSerializer,
- IssueStateSerializer,
+ LabelSerializer,
IssueLinkSerializer,
- IssueLiteSerializer,
IssueAttachmentSerializer,
- IssueSubscriberSerializer,
- IssueReactionSerializer,
- CommentReactionSerializer,
- IssueVoteSerializer,
- IssueRelationSerializer,
- RelatedIssueSerializer,
- IssuePublicSerializer,
-)
-
-from .module import (
- ModuleWriteSerializer,
- ModuleSerializer,
- ModuleIssueSerializer,
- ModuleLinkSerializer,
- ModuleFavoriteSerializer,
-)
-
-from .api_token import APITokenSerializer
-
-from .integration import (
- IntegrationSerializer,
- WorkspaceIntegrationSerializer,
- GithubIssueSyncSerializer,
- GithubRepositorySerializer,
- GithubRepositorySyncSerializer,
- GithubCommentSyncSerializer,
- SlackProjectSyncSerializer,
-)
-
-from .importer import ImporterSerializer
-
-from .page import PageSerializer, PageBlockSerializer, PageFavoriteSerializer
-
-from .estimate import (
- EstimateSerializer,
- EstimatePointSerializer,
- EstimateReadSerializer,
+ IssueCommentSerializer,
+ IssueAttachmentSerializer,
+ IssueActivitySerializer,
+ IssueExpandSerializer,
)
-
-from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSerializer
-
-from .analytic import AnalyticViewSerializer
-
-from .notification import NotificationSerializer
-
-from .exporter import ExporterHistorySerializer
+from .state import StateLiteSerializer, StateSerializer
+from .cycle import CycleSerializer, CycleIssueSerializer, CycleLiteSerializer
+from .module import ModuleSerializer, ModuleIssueSerializer, ModuleLiteSerializer
+from .inbox import InboxIssueSerializer
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/api/serializers/analytic.py
deleted file mode 100644
index 5f35e111787..00000000000
--- a/apiserver/plane/api/serializers/analytic.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from .base import BaseSerializer
-from plane.db.models import AnalyticView
-from plane.utils.issue_filters import issue_filters
-
-
-class AnalyticViewSerializer(BaseSerializer):
- class Meta:
- model = AnalyticView
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "query",
- ]
-
- def create(self, validated_data):
- query_params = validated_data.get("query_dict", {})
- if bool(query_params):
- validated_data["query"] = issue_filters(query_params, "POST")
- else:
- validated_data["query"] = dict()
- return AnalyticView.objects.create(**validated_data)
-
- def update(self, instance, validated_data):
- query_params = validated_data.get("query_data", {})
- if bool(query_params):
- validated_data["query"] = issue_filters(query_params, "POST")
- else:
- validated_data["query"] = dict()
- validated_data["query"] = issue_filters(query_params, "PATCH")
- return super().update(instance, validated_data)
diff --git a/apiserver/plane/api/serializers/api_token.py b/apiserver/plane/api/serializers/api_token.py
deleted file mode 100644
index 9c363f89569..00000000000
--- a/apiserver/plane/api/serializers/api_token.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from .base import BaseSerializer
-from plane.db.models import APIToken
-
-
-class APITokenSerializer(BaseSerializer):
- class Meta:
- model = APIToken
- fields = [
- "label",
- "user",
- "user_type",
- "workspace",
- "created_at",
- ]
diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py
index 0c6bba46823..b964225011d 100644
--- a/apiserver/plane/api/serializers/base.py
+++ b/apiserver/plane/api/serializers/base.py
@@ -1,5 +1,105 @@
+# Third party imports
from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+ def __init__(self, *args, **kwargs):
+ # If 'fields' is provided in the arguments, remove it and store it separately.
+ # This is done so as not to pass this custom argument up to the superclass.
+ fields = kwargs.pop("fields", [])
+ self.expand = kwargs.pop("expand", []) or []
+
+ # Call the initialization of the superclass.
+ super().__init__(*args, **kwargs)
+
+ # If 'fields' was provided, filter the fields of the serializer accordingly.
+ if fields:
+ self.fields = self._filter_fields(fields=fields)
+
+ def _filter_fields(self, fields):
+ """
+ Adjust the serializer's fields based on the provided 'fields' list.
+
+ :param fields: List or dictionary specifying which fields to include in the serializer.
+ :return: The updated fields for the serializer.
+ """
+ # Check each field_name in the provided fields.
+ for field_name in fields:
+ # If the field is a dictionary (indicating nested fields),
+ # loop through its keys and values.
+ if isinstance(field_name, dict):
+ for key, value in field_name.items():
+ # If the value of this nested field is a list,
+ # perform a recursive filter on it.
+ if isinstance(value, list):
+ self._filter_fields(self.fields[key], value)
+
+ # Create a list to store allowed fields.
+ allowed = []
+ for item in fields:
+ # If the item is a string, it directly represents a field's name.
+ if isinstance(item, str):
+ allowed.append(item)
+ # If the item is a dictionary, it represents a nested field.
+ # Add the key of this dictionary to the allowed list.
+ elif isinstance(item, dict):
+ allowed.append(list(item.keys())[0])
+
+ # Convert the current serializer's fields and the allowed fields to sets.
+ existing = set(self.fields)
+ allowed = set(allowed)
+
+ # Remove fields from the serializer that aren't in the 'allowed' list.
+ for field_name in existing - allowed:
+ self.fields.pop(field_name)
+
+ return self.fields
+
+ def to_representation(self, instance):
+ response = super().to_representation(instance)
+
+ # Ensure 'expand' is iterable before processing
+ if self.expand:
+ for expand in self.expand:
+ if expand in self.fields:
+ # Import all the expandable serializers
+ from . import (
+ WorkspaceLiteSerializer,
+ ProjectLiteSerializer,
+ UserLiteSerializer,
+ StateLiteSerializer,
+ IssueSerializer,
+ )
+
+ # Expansion mapper
+ expansion = {
+ "user": UserLiteSerializer,
+ "workspace": WorkspaceLiteSerializer,
+ "project": ProjectLiteSerializer,
+ "default_assignee": UserLiteSerializer,
+ "project_lead": UserLiteSerializer,
+ "state": StateLiteSerializer,
+ "created_by": UserLiteSerializer,
+ "issue": IssueSerializer,
+ "actor": UserLiteSerializer,
+ "owned_by": UserLiteSerializer,
+ "members": UserLiteSerializer,
+ }
+ # Check if field in expansion then expand the field
+ if expand in expansion:
+ if isinstance(response.get(expand), list):
+ exp_serializer = expansion[expand](
+ getattr(instance, expand), many=True
+ )
+ else:
+ exp_serializer = expansion[expand](
+ getattr(instance, expand)
+ )
+ response[expand] = exp_serializer.data
+ else:
+ # You might need to handle this case differently
+ response[expand] = getattr(instance, f"{expand}_id", None)
+
+ return response
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py
index ad214c52a7d..eaff8181a3b 100644
--- a/apiserver/plane/api/serializers/cycle.py
+++ b/apiserver/plane/api/serializers/cycle.py
@@ -1,72 +1,40 @@
-# Django imports
-from django.db.models.functions import TruncDate
-
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .issue import IssueStateSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-from plane.db.models import Cycle, CycleIssue, CycleFavorite
-
-class CycleWriteSerializer(BaseSerializer):
-
- def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
- raise serializers.ValidationError("Start date cannot exceed end date")
- return data
-
- class Meta:
- model = Cycle
- fields = "__all__"
+from plane.db.models import Cycle, CycleIssue
class CycleSerializer(BaseSerializer):
- owned_by = UserLiteSerializer(read_only=True)
- is_favorite = serializers.BooleanField(read_only=True)
total_issues = serializers.IntegerField(read_only=True)
cancelled_issues = serializers.IntegerField(read_only=True)
completed_issues = serializers.IntegerField(read_only=True)
started_issues = serializers.IntegerField(read_only=True)
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
- assignees = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True)
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
-
- def get_assignees(self, obj):
- members = [
- {
- "avatar": assignee.avatar,
- "display_name": assignee.display_name,
- "id": assignee.id,
- }
- for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all()
- for assignee in issue_cycle.issue.assignees.all()
- ]
- # Use a set comprehension to return only the unique objects
- unique_objects = {frozenset(item.items()) for item in members}
-
- # Convert the set back to a list of dictionaries
- unique_list = [dict(item) for item in unique_objects]
-
- return unique_list
class Meta:
model = Cycle
fields = "__all__"
read_only_fields = [
+ "id",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
"workspace",
"project",
"owned_by",
@@ -74,7 +42,6 @@ class Meta:
class CycleIssueSerializer(BaseSerializer):
- issue_detail = IssueStateSerializer(read_only=True, source="issue")
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
@@ -87,14 +54,8 @@ class Meta:
]
-class CycleFavoriteSerializer(BaseSerializer):
- cycle_detail = CycleSerializer(source="cycle", read_only=True)
+class CycleLiteSerializer(BaseSerializer):
class Meta:
- model = CycleFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "user",
- ]
+ model = Cycle
+ fields = "__all__"
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/estimate.py b/apiserver/plane/api/serializers/estimate.py
deleted file mode 100644
index 3cb0e4713ac..00000000000
--- a/apiserver/plane/api/serializers/estimate.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Module imports
-from .base import BaseSerializer
-
-from plane.db.models import Estimate, EstimatePoint
-from plane.api.serializers import WorkspaceLiteSerializer, ProjectLiteSerializer
-
-
-class EstimateSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
- class Meta:
- model = Estimate
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- ]
-
-
-class EstimatePointSerializer(BaseSerializer):
- class Meta:
- model = EstimatePoint
- fields = "__all__"
- read_only_fields = [
- "estimate",
- "workspace",
- "project",
- ]
-
-
-class EstimateReadSerializer(BaseSerializer):
- points = EstimatePointSerializer(read_only=True, many=True)
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
- class Meta:
- model = Estimate
- fields = "__all__"
- read_only_fields = [
- "points",
- "name",
- "description",
- ]
diff --git a/apiserver/plane/api/serializers/inbox.py b/apiserver/plane/api/serializers/inbox.py
index ae17b749bfa..17ae8c1ed3a 100644
--- a/apiserver/plane/api/serializers/inbox.py
+++ b/apiserver/plane/api/serializers/inbox.py
@@ -1,58 +1,19 @@
-# Third party frameworks
-from rest_framework import serializers
-
-# Module imports
+# Module imports
from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
-from .project import ProjectLiteSerializer
-from .state import StateLiteSerializer
-from .project import ProjectLiteSerializer
-from .user import UserLiteSerializer
-from plane.db.models import Inbox, InboxIssue, Issue
-
-
-class InboxSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- pending_issue_count = serializers.IntegerField(read_only=True)
-
- class Meta:
- model = Inbox
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- ]
-
+from plane.db.models import InboxIssue
class InboxIssueSerializer(BaseSerializer):
- issue_detail = IssueFlatSerializer(source="issue", read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
class Meta:
model = InboxIssue
fields = "__all__"
read_only_fields = [
- "project",
+ "id",
"workspace",
- ]
-
-
-class InboxIssueLiteSerializer(BaseSerializer):
- class Meta:
- model = InboxIssue
- fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
- read_only_fields = fields
-
-
-class IssueStateInboxSerializer(BaseSerializer):
- state_detail = StateLiteSerializer(read_only=True, source="state")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- bridge_id = serializers.UUIDField(read_only=True)
- issue_inbox = InboxIssueLiteSerializer(read_only=True, many=True)
-
- class Meta:
- model = Issue
- fields = "__all__"
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/integration/__init__.py b/apiserver/plane/api/serializers/integration/__init__.py
deleted file mode 100644
index 963fc295e27..00000000000
--- a/apiserver/plane/api/serializers/integration/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
-from .github import (
- GithubRepositorySerializer,
- GithubRepositorySyncSerializer,
- GithubIssueSyncSerializer,
- GithubCommentSyncSerializer,
-)
-from .slack import SlackProjectSyncSerializer
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/integration/base.py b/apiserver/plane/api/serializers/integration/base.py
deleted file mode 100644
index 10ebd462012..00000000000
--- a/apiserver/plane/api/serializers/integration/base.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# Module imports
-from plane.api.serializers import BaseSerializer
-from plane.db.models import Integration, WorkspaceIntegration
-
-
-class IntegrationSerializer(BaseSerializer):
- class Meta:
- model = Integration
- fields = "__all__"
- read_only_fields = [
- "verified",
- ]
-
-
-class WorkspaceIntegrationSerializer(BaseSerializer):
- integration_detail = IntegrationSerializer(read_only=True, source="integration")
-
- class Meta:
- model = WorkspaceIntegration
- fields = "__all__"
diff --git a/apiserver/plane/api/serializers/integration/github.py b/apiserver/plane/api/serializers/integration/github.py
deleted file mode 100644
index 8352dcee143..00000000000
--- a/apiserver/plane/api/serializers/integration/github.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# Module imports
-from plane.api.serializers import BaseSerializer
-from plane.db.models import (
- GithubIssueSync,
- GithubRepository,
- GithubRepositorySync,
- GithubCommentSync,
-)
-
-
-class GithubRepositorySerializer(BaseSerializer):
- class Meta:
- model = GithubRepository
- fields = "__all__"
-
-
-class GithubRepositorySyncSerializer(BaseSerializer):
- repo_detail = GithubRepositorySerializer(source="repository")
-
- class Meta:
- model = GithubRepositorySync
- fields = "__all__"
-
-
-class GithubIssueSyncSerializer(BaseSerializer):
- class Meta:
- model = GithubIssueSync
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- "repository_sync",
- ]
-
-
-class GithubCommentSyncSerializer(BaseSerializer):
- class Meta:
- model = GithubCommentSync
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- "repository_sync",
- "issue_sync",
- ]
diff --git a/apiserver/plane/api/serializers/integration/slack.py b/apiserver/plane/api/serializers/integration/slack.py
deleted file mode 100644
index f535a64de11..00000000000
--- a/apiserver/plane/api/serializers/integration/slack.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Module imports
-from plane.api.serializers import BaseSerializer
-from plane.db.models import SlackProjectSync
-
-
-class SlackProjectSyncSerializer(BaseSerializer):
- class Meta:
- model = SlackProjectSync
- fields = "__all__"
- read_only_fields = [
- "project",
- "workspace",
- "workspace_integration",
- ]
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index 57539f24c47..ab61ae523f4 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -1,96 +1,53 @@
+from lxml import html
+
+
# Django imports
from django.utils import timezone
-# Third Party imports
+# Third party imports
from rest_framework import serializers
# Module imports
-from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .state import StateSerializer, StateLiteSerializer
-from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
Issue,
- IssueActivity,
- IssueComment,
- IssueProperty,
+ State,
IssueAssignee,
- IssueSubscriber,
- IssueLabel,
Label,
- CycleIssue,
- Cycle,
- Module,
- ModuleIssue,
+ IssueLabel,
IssueLink,
+ IssueComment,
IssueAttachment,
- IssueReaction,
- CommentReaction,
- IssueVote,
- IssueRelation,
+ IssueActivity,
+ ProjectMember,
)
+from .base import BaseSerializer
+from .cycle import CycleSerializer, CycleLiteSerializer
+from .module import ModuleSerializer, ModuleLiteSerializer
+from .user import UserLiteSerializer
+from .state import StateLiteSerializer
-
-class IssueFlatSerializer(BaseSerializer):
- ## Contain only flat fields
-
- class Meta:
- model = Issue
- fields = [
- "id",
- "name",
- "description",
- "description_html",
- "priority",
- "start_date",
- "target_date",
- "sequence_id",
- "sort_order",
- "is_draft",
- ]
-
-
-class IssueProjectLiteSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
- class Meta:
- model = Issue
- fields = [
- "id",
- "project_detail",
- "name",
- "sequence_id",
- ]
- read_only_fields = fields
-
-
-##TODO: Find a better way to write this serializer
-## Find a better approach to save manytomany?
-class IssueCreateSerializer(BaseSerializer):
- state_detail = StateSerializer(read_only=True, source="state")
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- assignees_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+class IssueSerializer(BaseSerializer):
+ assignees = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(
+ queryset=User.objects.values_list("id", flat=True)
+ ),
write_only=True,
required=False,
)
- labels_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+ labels = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(
+ queryset=Label.objects.values_list("id", flat=True)
+ ),
write_only=True,
required=False,
)
class Meta:
model = Issue
- fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
"created_by",
@@ -98,6 +55,10 @@ class Meta:
"created_at",
"updated_at",
]
+ exclude = [
+ "description",
+ "description_stripped",
+ ]
def validate(self, data):
if (
@@ -106,11 +67,58 @@ def validate(self, data):
and data.get("start_date", None) > data.get("target_date", None)
):
raise serializers.ValidationError("Start date cannot exceed target date")
+
+ try:
+ if(data.get("description_html", None) is not None):
+ parsed = html.fromstring(data["description_html"])
+ parsed_str = html.tostring(parsed, encoding='unicode')
+ data["description_html"] = parsed_str
+
+ except Exception as e:
+ raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+
+ # Validate assignees are from project
+ if data.get("assignees", []):
+ data["assignees"] = ProjectMember.objects.filter(
+ project_id=self.context.get("project_id"),
+ is_active=True,
+ member_id__in=data["assignees"],
+ ).values_list("member_id", flat=True)
+
+ # Validate labels are from project
+ if data.get("labels", []):
+ data["labels"] = Label.objects.filter(
+ project_id=self.context.get("project_id"),
+ id__in=data["labels"],
+ ).values_list("id", flat=True)
+
+ # Check state is from the project only else raise validation error
+ if (
+ data.get("state")
+ and not State.objects.filter(
+ project_id=self.context.get("project_id"), pk=data.get("state")
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "State is not valid please pass a valid state_id"
+ )
+
+ # Check parent issue is from workspace as it can be cross workspace
+ if (
+ data.get("parent")
+ and not Issue.objects.filter(
+ workspace_id=self.context.get("workspace_id"), pk=data.get("parent")
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "Parent is not valid issue_id please pass a valid issue_id"
+ )
+
return data
def create(self, validated_data):
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"]
@@ -126,14 +134,14 @@ def create(self, validated_data):
IssueAssignee.objects.bulk_create(
[
IssueAssignee(
- assignee=user,
+ assignee_id=assignee_id,
issue=issue,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for user in assignees
+ for assignee_id in assignees
],
batch_size=10,
)
@@ -153,14 +161,14 @@ def create(self, validated_data):
IssueLabel.objects.bulk_create(
[
IssueLabel(
- label=label,
+ label_id=label_id,
issue=issue,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for label in labels
+ for label_id in labels
],
batch_size=10,
)
@@ -168,8 +176,8 @@ def create(self, validated_data):
return issue
def update(self, instance, validated_data):
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
# Related models
project_id = instance.project_id
@@ -182,14 +190,14 @@ def update(self, instance, validated_data):
IssueAssignee.objects.bulk_create(
[
IssueAssignee(
- assignee=user,
+ assignee_id=assignee_id,
issue=instance,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for user in assignees
+ for assignee_id in assignees
],
batch_size=10,
)
@@ -199,14 +207,14 @@ def update(self, instance, validated_data):
IssueLabel.objects.bulk_create(
[
IssueLabel(
- label=label,
+ label_id=label_id,
issue=instance,
project_id=project_id,
workspace_id=workspace_id,
created_by_id=created_by_id,
updated_by_id=updated_by_id,
)
- for label in labels
+ for label_id in labels
],
batch_size=10,
)
@@ -215,177 +223,34 @@ def update(self, instance, validated_data):
instance.updated_at = timezone.now()
return super().update(instance, validated_data)
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ if "assignees" in self.fields:
+ if "assignees" in self.expand:
+ from .user import UserLiteSerializer
+
+ data["assignees"] = UserLiteSerializer(
+ instance.assignees.all(), many=True
+ ).data
+ else:
+ data["assignees"] = [
+ str(assignee.id) for assignee in instance.assignees.all()
+ ]
+ if "labels" in self.fields:
+ if "labels" in self.expand:
+ data["labels"] = LabelSerializer(instance.labels.all(), many=True).data
+ else:
+ data["labels"] = [str(label.id) for label in instance.labels.all()]
-class IssueActivitySerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
- class Meta:
- model = IssueActivity
- fields = "__all__"
-
-
-class IssueCommentSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- class Meta:
- model = IssueComment
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "issue",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class IssuePropertySerializer(BaseSerializer):
- class Meta:
- model = IssueProperty
- fields = "__all__"
- read_only_fields = [
- "user",
- "workspace",
- "project",
- ]
+ return data
class LabelSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
class Meta:
model = Label
fields = "__all__"
read_only_fields = [
- "workspace",
- "project",
- ]
-
-
-class LabelLiteSerializer(BaseSerializer):
- class Meta:
- model = Label
- fields = [
"id",
- "name",
- "color",
- ]
-
-
-class IssueLabelSerializer(BaseSerializer):
- # label_details = LabelSerializer(read_only=True, source="label")
-
- class Meta:
- model = IssueLabel
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- ]
-
-
-class IssueRelationSerializer(BaseSerializer):
- issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
-
- class Meta:
- model = IssueRelation
- fields = [
- "issue_detail",
- "relation_type",
- "related_issue",
- "issue",
- "id"
- ]
- read_only_fields = [
- "workspace",
- "project",
- ]
-
-class RelatedIssueSerializer(BaseSerializer):
- issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
-
- class Meta:
- model = IssueRelation
- fields = [
- "issue_detail",
- "relation_type",
- "related_issue",
- "issue",
- "id"
- ]
- read_only_fields = [
- "workspace",
- "project",
- ]
-
-
-class IssueAssigneeSerializer(BaseSerializer):
- assignee_details = UserLiteSerializer(read_only=True, source="assignee")
-
- class Meta:
- model = IssueAssignee
- fields = "__all__"
-
-
-class CycleBaseSerializer(BaseSerializer):
- class Meta:
- model = Cycle
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class IssueCycleDetailSerializer(BaseSerializer):
- cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
-
- class Meta:
- model = CycleIssue
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class ModuleBaseSerializer(BaseSerializer):
- class Meta:
- model = Module
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class IssueModuleDetailSerializer(BaseSerializer):
- module_detail = ModuleBaseSerializer(read_only=True, source="module")
-
- class Meta:
- model = ModuleIssue
- fields = "__all__"
- read_only_fields = [
"workspace",
"project",
"created_by",
@@ -396,19 +261,18 @@ class Meta:
class IssueLinkSerializer(BaseSerializer):
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
class Meta:
model = IssueLink
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
+ "issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
- "issue",
]
# Validation if url already exists
@@ -427,73 +291,24 @@ class Meta:
model = IssueAttachment
fields = "__all__"
read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
- "workspace",
- "project",
- "issue",
- ]
-
-
-class IssueReactionSerializer(BaseSerializer):
-
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
- class Meta:
- model = IssueReaction
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "issue",
- "actor",
]
-class CommentReactionLiteSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
- class Meta:
- model = CommentReaction
- fields = [
- "id",
- "reaction",
- "comment",
- "actor_detail",
- ]
-
-
-class CommentReactionSerializer(BaseSerializer):
- class Meta:
- model = CommentReaction
- fields = "__all__"
- read_only_fields = ["workspace", "project", "comment", "actor"]
-
-
-class IssueVoteSerializer(BaseSerializer):
-
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
- class Meta:
- model = IssueVote
- fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
- read_only_fields = fields
-
-
class IssueCommentSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- comment_reactions = CommentReactionLiteSerializer(read_only=True, many=True)
is_member = serializers.BooleanField(read_only=True)
class Meta:
model = IssueComment
- fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
"issue",
@@ -502,129 +317,77 @@ class Meta:
"created_at",
"updated_at",
]
-
-
-class IssueStateFlatSerializer(BaseSerializer):
- state_detail = StateLiteSerializer(read_only=True, source="state")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
-
- class Meta:
- model = Issue
- fields = [
- "id",
- "sequence_id",
- "name",
- "state_detail",
- "project_detail",
+ exclude = [
+ "comment_stripped",
+ "comment_json",
]
+ def validate(self, data):
+ try:
+ if(data.get("comment_html", None) is not None):
+ parsed = html.fromstring(data["comment_html"])
+ parsed_str = html.tostring(parsed, encoding='unicode')
+ data["comment_html"] = parsed_str
+
+ except Exception as e:
+ raise serializers.ValidationError(f"Invalid HTML: {str(e)}")
+ return data
-# Issue Serializer with state details
-class IssueStateSerializer(BaseSerializer):
- label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- state_detail = StateLiteSerializer(read_only=True, source="state")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- bridge_id = serializers.UUIDField(read_only=True)
- attachment_count = serializers.IntegerField(read_only=True)
- link_count = serializers.IntegerField(read_only=True)
+class IssueActivitySerializer(BaseSerializer):
class Meta:
- model = Issue
- fields = "__all__"
+ model = IssueActivity
+ exclude = [
+ "created_by",
+ "updated_by",
+ ]
-class IssueSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- state_detail = StateSerializer(read_only=True, source="state")
- parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
- label_details = LabelSerializer(read_only=True, source="labels", many=True)
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
- issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
- issue_cycle = IssueCycleDetailSerializer(read_only=True)
- issue_module = IssueModuleDetailSerializer(read_only=True)
- issue_link = IssueLinkSerializer(read_only=True, many=True)
- issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+class CycleIssueSerializer(BaseSerializer):
+ cycle = CycleSerializer(read_only=True)
class Meta:
- model = Issue
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
+ fields = [
+ "cycle",
]
-class IssueLiteSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- state_detail = StateLiteSerializer(read_only=True, source="state")
- label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- sub_issues_count = serializers.IntegerField(read_only=True)
- cycle_id = serializers.UUIDField(read_only=True)
- module_id = serializers.UUIDField(read_only=True)
- attachment_count = serializers.IntegerField(read_only=True)
- link_count = serializers.IntegerField(read_only=True)
- issue_reactions = IssueReactionSerializer(read_only=True, many=True)
+class ModuleIssueSerializer(BaseSerializer):
+ module = ModuleSerializer(read_only=True)
class Meta:
- model = Issue
- fields = "__all__"
- read_only_fields = [
- "start_date",
- "target_date",
- "completed_at",
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
+ fields = [
+ "module",
]
-class IssuePublicSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- state_detail = StateLiteSerializer(read_only=True, source="state")
- reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
- votes = IssueVoteSerializer(read_only=True, many=True)
+class LabelLiteSerializer(BaseSerializer):
class Meta:
- model = Issue
+ model = Label
fields = [
"id",
"name",
- "description_html",
- "sequence_id",
- "state",
- "state_detail",
- "project",
- "project_detail",
- "workspace",
- "priority",
- "target_date",
- "reactions",
- "votes",
+ "color",
]
- read_only_fields = fields
+class IssueExpandSerializer(BaseSerializer):
+ cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
+ module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
+ labels = LabelLiteSerializer(read_only=True, many=True)
+ assignees = UserLiteSerializer(read_only=True, many=True)
+ state = StateLiteSerializer(read_only=True)
-class IssueSubscriberSerializer(BaseSerializer):
class Meta:
- model = IssueSubscriber
+ model = Issue
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
- "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
]
diff --git a/apiserver/plane/api/serializers/module.py b/apiserver/plane/api/serializers/module.py
index aaabd4ae071..65710e8afa7 100644
--- a/apiserver/plane/api/serializers/module.py
+++ b/apiserver/plane/api/serializers/module.py
@@ -1,37 +1,38 @@
-# Third Party imports
+# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
-from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
-from .workspace import WorkspaceLiteSerializer
-from .issue import IssueStateSerializer
-
from plane.db.models import (
User,
Module,
+ ModuleLink,
ModuleMember,
ModuleIssue,
- ModuleLink,
- ModuleFavorite,
+ ProjectMember,
)
-class ModuleWriteSerializer(BaseSerializer):
- members_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+class ModuleSerializer(BaseSerializer):
+ members = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(
+ queryset=User.objects.values_list("id", flat=True)
+ ),
write_only=True,
required=False,
)
-
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+ total_issues = serializers.IntegerField(read_only=True)
+ cancelled_issues = serializers.IntegerField(read_only=True)
+ completed_issues = serializers.IntegerField(read_only=True)
+ started_issues = serializers.IntegerField(read_only=True)
+ unstarted_issues = serializers.IntegerField(read_only=True)
+ backlog_issues = serializers.IntegerField(read_only=True)
class Meta:
model = Module
fields = "__all__"
read_only_fields = [
+ "id",
"workspace",
"project",
"created_by",
@@ -40,13 +41,29 @@ class Meta:
"updated_at",
]
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data["members"] = [str(member.id) for member in instance.members.all()]
+ return data
+
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("target_date", None) is not None
+ and data.get("start_date", None) > data.get("target_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed target date")
- return data
+
+ if data.get("members", []):
+ data["members"] = ProjectMember.objects.filter(
+ project_id=self.context.get("project_id"),
+ member_id__in=data["members"],
+ ).values_list("member_id", flat=True)
+
+ return data
def create(self, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
project = self.context["project"]
@@ -72,7 +89,7 @@ def create(self, validated_data):
return module
def update(self, instance, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()
@@ -95,23 +112,7 @@ def update(self, instance, validated_data):
return super().update(instance, validated_data)
-class ModuleFlatSerializer(BaseSerializer):
- class Meta:
- model = Module
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
class ModuleIssueSerializer(BaseSerializer):
- module_detail = ModuleFlatSerializer(read_only=True, source="module")
- issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
@@ -129,8 +130,6 @@ class Meta:
class ModuleLinkSerializer(BaseSerializer):
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
class Meta:
model = ModuleLink
fields = "__all__"
@@ -153,42 +152,10 @@ def create(self, validated_data):
{"error": "URL already exists for this Issue"}
)
return ModuleLink.objects.create(**validated_data)
+
-
-class ModuleSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- lead_detail = UserLiteSerializer(read_only=True, source="lead")
- members_detail = UserLiteSerializer(read_only=True, many=True, source="members")
- link_module = ModuleLinkSerializer(read_only=True, many=True)
- is_favorite = serializers.BooleanField(read_only=True)
- total_issues = serializers.IntegerField(read_only=True)
- cancelled_issues = serializers.IntegerField(read_only=True)
- completed_issues = serializers.IntegerField(read_only=True)
- started_issues = serializers.IntegerField(read_only=True)
- unstarted_issues = serializers.IntegerField(read_only=True)
- backlog_issues = serializers.IntegerField(read_only=True)
+class ModuleLiteSerializer(BaseSerializer):
class Meta:
model = Module
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
-
-class ModuleFavoriteSerializer(BaseSerializer):
- module_detail = ModuleFlatSerializer(source="module", read_only=True)
-
- class Meta:
- model = ModuleFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "user",
- ]
+ fields = "__all__"
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/page.py b/apiserver/plane/api/serializers/page.py
deleted file mode 100644
index 94f7836de18..00000000000
--- a/apiserver/plane/api/serializers/page.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Third party imports
-from rest_framework import serializers
-
-# Module imports
-from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-from plane.db.models import Page, PageBlock, PageFavorite, PageLabel, Label
-
-
-class PageBlockSerializer(BaseSerializer):
- issue_detail = IssueFlatSerializer(source="issue", read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-
- class Meta:
- model = PageBlock
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "page",
- ]
-
-class PageBlockLiteSerializer(BaseSerializer):
-
- class Meta:
- model = PageBlock
- fields = "__all__"
-
-
-class PageSerializer(BaseSerializer):
- is_favorite = serializers.BooleanField(read_only=True)
- label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- labels_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
- write_only=True,
- required=False,
- )
- blocks = PageBlockLiteSerializer(read_only=True, many=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-
- class Meta:
- model = Page
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "owned_by",
- ]
-
- def create(self, validated_data):
- labels = validated_data.pop("labels_list", None)
- project_id = self.context["project_id"]
- owned_by_id = self.context["owned_by_id"]
- page = Page.objects.create(
- **validated_data, project_id=project_id, owned_by_id=owned_by_id
- )
-
- if labels is not None:
- PageLabel.objects.bulk_create(
- [
- PageLabel(
- label=label,
- page=page,
- project_id=project_id,
- workspace_id=page.workspace_id,
- created_by_id=page.created_by_id,
- updated_by_id=page.updated_by_id,
- )
- for label in labels
- ],
- batch_size=10,
- )
- return page
-
- def update(self, instance, validated_data):
- labels = validated_data.pop("labels_list", None)
- if labels is not None:
- PageLabel.objects.filter(page=instance).delete()
- PageLabel.objects.bulk_create(
- [
- PageLabel(
- label=label,
- page=instance,
- project_id=instance.project_id,
- workspace_id=instance.workspace_id,
- created_by_id=instance.created_by_id,
- updated_by_id=instance.updated_by_id,
- )
- for label in labels
- ],
- batch_size=10,
- )
-
- return super().update(instance, validated_data)
-
-
-class PageFavoriteSerializer(BaseSerializer):
- page_detail = PageSerializer(source="page", read_only=True)
-
- class Meta:
- model = PageFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "user",
- ]
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index 49d986cae0b..c394a080dd9 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -1,34 +1,61 @@
-# Django imports
-from django.db import IntegrityError
-
# Third party imports
from rest_framework import serializers
# Module imports
+from plane.db.models import Project, ProjectIdentifier, WorkspaceMember, State, Estimate
from .base import BaseSerializer
-from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
-from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
-from plane.db.models import (
- Project,
- ProjectMember,
- ProjectMemberInvite,
- ProjectIdentifier,
- ProjectFavorite,
- ProjectDeployBoard,
- ProjectPublicMember,
-)
class ProjectSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
+ is_member = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ is_deployed = serializers.BooleanField(read_only=True)
class Meta:
model = Project
fields = "__all__"
read_only_fields = [
+ "id",
+ 'emoji',
"workspace",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
]
+ def validate(self, data):
+ # Check project lead should be a member of the workspace
+ if (
+ data.get("project_lead", None) is not None
+ and not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("project_lead"),
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "Project lead should be a user in the workspace"
+ )
+
+ # Check default assignee should be a member of the workspace
+ if (
+ data.get("default_assignee", None) is not None
+ and not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("default_assignee"),
+ ).exists()
+ ):
+ raise serializers.ValidationError(
+ "Default assignee should be a user in the workspace"
+ )
+
+ return data
+
def create(self, validated_data):
identifier = validated_data.get("identifier", "").strip().upper()
if identifier == "":
@@ -38,6 +65,7 @@ def create(self, validated_data):
name=identifier, workspace_id=self.context["workspace_id"]
).exists():
raise serializers.ValidationError(detail="Project Identifier is taken")
+
project = Project.objects.create(
**validated_data, workspace_id=self.context["workspace_id"]
)
@@ -48,36 +76,6 @@ def create(self, validated_data):
)
return project
- def update(self, instance, validated_data):
- identifier = validated_data.get("identifier", "").strip().upper()
-
- # If identifier is not passed update the project and return
- if identifier == "":
- project = super().update(instance, validated_data)
- return project
-
- # If no Project Identifier is found create it
- project_identifier = ProjectIdentifier.objects.filter(
- name=identifier, workspace_id=instance.workspace_id
- ).first()
- if project_identifier is None:
- project = super().update(instance, validated_data)
- project_identifier = ProjectIdentifier.objects.filter(
- project=project
- ).first()
- if project_identifier is not None:
- project_identifier.name = identifier
- project_identifier.save()
- return project
- # If found check if the project_id to be updated and identifier project id is same
- if project_identifier.project_id == instance.id:
- # If same pass update
- project = super().update(instance, validated_data)
- return project
-
- # If not same fail update
- raise serializers.ValidationError(detail="Project Identifier is already taken")
-
class ProjectLiteSerializer(BaseSerializer):
class Meta:
@@ -91,104 +89,4 @@ class Meta:
"emoji",
"description",
]
- read_only_fields = fields
-
-
-class ProjectDetailSerializer(BaseSerializer):
- workspace = WorkSpaceSerializer(read_only=True)
- default_assignee = UserLiteSerializer(read_only=True)
- project_lead = UserLiteSerializer(read_only=True)
- is_favorite = serializers.BooleanField(read_only=True)
- total_members = serializers.IntegerField(read_only=True)
- total_cycles = serializers.IntegerField(read_only=True)
- total_modules = serializers.IntegerField(read_only=True)
- is_member = serializers.BooleanField(read_only=True)
- sort_order = serializers.FloatField(read_only=True)
- member_role = serializers.IntegerField(read_only=True)
- is_deployed = serializers.BooleanField(read_only=True)
-
- class Meta:
- model = Project
- fields = "__all__"
-
-
-class ProjectMemberSerializer(BaseSerializer):
- workspace = WorkspaceLiteSerializer(read_only=True)
- project = ProjectLiteSerializer(read_only=True)
- member = UserLiteSerializer(read_only=True)
-
- class Meta:
- model = ProjectMember
- fields = "__all__"
-
-
-class ProjectMemberAdminSerializer(BaseSerializer):
- workspace = WorkspaceLiteSerializer(read_only=True)
- project = ProjectLiteSerializer(read_only=True)
- member = UserAdminLiteSerializer(read_only=True)
-
- class Meta:
- model = ProjectMember
- fields = "__all__"
-
-
-class ProjectMemberInviteSerializer(BaseSerializer):
- project = ProjectLiteSerializer(read_only=True)
- workspace = WorkspaceLiteSerializer(read_only=True)
-
- class Meta:
- model = ProjectMemberInvite
- fields = "__all__"
-
-
-class ProjectIdentifierSerializer(BaseSerializer):
- class Meta:
- model = ProjectIdentifier
- fields = "__all__"
-
-
-class ProjectFavoriteSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
- class Meta:
- model = ProjectFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "user",
- ]
-
-
-class ProjectMemberLiteSerializer(BaseSerializer):
- member = UserLiteSerializer(read_only=True)
- is_subscribed = serializers.BooleanField(read_only=True)
-
- class Meta:
- model = ProjectMember
- fields = ["member", "id", "is_subscribed"]
- read_only_fields = fields
-
-
-class ProjectDeployBoardSerializer(BaseSerializer):
- project_details = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- class Meta:
- model = ProjectDeployBoard
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project", "anchor",
- ]
-
-
-class ProjectPublicMemberSerializer(BaseSerializer):
-
- class Meta:
- model = ProjectPublicMember
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "member",
- ]
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/state.py b/apiserver/plane/api/serializers/state.py
index 097bc4c931f..9d08193d85c 100644
--- a/apiserver/plane/api/serializers/state.py
+++ b/apiserver/plane/api/serializers/state.py
@@ -1,19 +1,26 @@
# Module imports
from .base import BaseSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-
from plane.db.models import State
class StateSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ def validate(self, data):
+ # If the default is being provided then make all other states default False
+ if data.get("default", False):
+ State.objects.filter(project_id=self.context.get("project_id")).update(
+ default=False
+ )
+ return data
class Meta:
model = State
fields = "__all__"
read_only_fields = [
+ "id",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
"workspace",
"project",
]
@@ -28,4 +35,4 @@ class Meta:
"color",
"group",
]
- read_only_fields = fields
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py
index dcb00c6cbfe..42b6c39671f 100644
--- a/apiserver/plane/api/serializers/user.py
+++ b/apiserver/plane/api/serializers/user.py
@@ -1,36 +1,6 @@
-# Third party imports
-from rest_framework import serializers
-
-# Module import
-from .base import BaseSerializer
+# Module imports
from plane.db.models import User
-
-
-class UserSerializer(BaseSerializer):
- class Meta:
- model = User
- fields = "__all__"
- read_only_fields = [
- "id",
- "created_at",
- "updated_at",
- "is_superuser",
- "is_staff",
- "last_active",
- "last_login_time",
- "last_logout_time",
- "last_login_ip",
- "last_logout_ip",
- "last_login_uagent",
- "token_updated_at",
- "is_onboarded",
- "is_bot",
- ]
- extra_kwargs = {"password": {"write_only": True}}
-
- # If the user has already filled first name or last name then he is onboarded
- def get_is_onboarded(self, obj):
- return bool(obj.first_name) or bool(obj.last_name)
+from .base import BaseSerializer
class UserLiteSerializer(BaseSerializer):
@@ -41,49 +11,6 @@ class Meta:
"first_name",
"last_name",
"avatar",
- "is_bot",
- "display_name",
- ]
- read_only_fields = [
- "id",
- "is_bot",
- ]
-
-
-class UserAdminLiteSerializer(BaseSerializer):
-
- class Meta:
- model = User
- fields = [
- "id",
- "first_name",
- "last_name",
- "avatar",
- "is_bot",
"display_name",
- "email",
]
- read_only_fields = [
- "id",
- "is_bot",
- ]
-
-
-class ChangePasswordSerializer(serializers.Serializer):
- model = User
-
- """
- Serializer for password change endpoint.
- """
- old_password = serializers.CharField(required=True)
- new_password = serializers.CharField(required=True)
-
-
-class ResetPasswordSerializer(serializers.Serializer):
- model = User
-
- """
- Serializer for password change endpoint.
- """
- new_password = serializers.CharField(required=True)
- confirm_password = serializers.CharField(required=True)
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/api/serializers/view.py
deleted file mode 100644
index a3b6f48be30..00000000000
--- a/apiserver/plane/api/serializers/view.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# Third party imports
-from rest_framework import serializers
-
-# Module imports
-from .base import BaseSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
-from plane.db.models import GlobalView, IssueView, IssueViewFavorite
-from plane.utils.issue_filters import issue_filters
-
-
-class GlobalViewSerializer(BaseSerializer):
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-
- class Meta:
- model = GlobalView
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "query",
- ]
-
- def create(self, validated_data):
- query_params = validated_data.get("query_data", {})
- if bool(query_params):
- validated_data["query"] = issue_filters(query_params, "POST")
- else:
- validated_data["query"] = dict()
- return GlobalView.objects.create(**validated_data)
-
- def update(self, instance, validated_data):
- query_params = validated_data.get("query_data", {})
- if bool(query_params):
- validated_data["query"] = issue_filters(query_params, "POST")
- else:
- validated_data["query"] = dict()
- validated_data["query"] = issue_filters(query_params, "PATCH")
- return super().update(instance, validated_data)
-
-
-class IssueViewSerializer(BaseSerializer):
- is_favorite = serializers.BooleanField(read_only=True)
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
- workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
-
- class Meta:
- model = IssueView
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "query",
- ]
-
- def create(self, validated_data):
- query_params = validated_data.get("query_data", {})
- if bool(query_params):
- validated_data["query"] = issue_filters(query_params, "POST")
- else:
- validated_data["query"] = dict()
- return IssueView.objects.create(**validated_data)
-
- def update(self, instance, validated_data):
- query_params = validated_data.get("query_data", {})
- if bool(query_params):
- validated_data["query"] = issue_filters(query_params, "POST")
- else:
- validated_data["query"] = dict()
- validated_data["query"] = issue_filters(query_params, "PATCH")
- return super().update(instance, validated_data)
-
-
-class IssueViewFavoriteSerializer(BaseSerializer):
- view_detail = IssueViewSerializer(source="issue_view", read_only=True)
-
- class Meta:
- model = IssueViewFavorite
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "user",
- ]
diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py
index d27b66481c0..c4c5caceb3b 100644
--- a/apiserver/plane/api/serializers/workspace.py
+++ b/apiserver/plane/api/serializers/workspace.py
@@ -1,39 +1,10 @@
-# Third party imports
-from rest_framework import serializers
-
# Module imports
+from plane.db.models import Workspace
from .base import BaseSerializer
-from .user import UserLiteSerializer, UserAdminLiteSerializer
-
-from plane.db.models import (
- User,
- Workspace,
- WorkspaceMember,
- Team,
- TeamMember,
- WorkspaceMemberInvite,
- WorkspaceTheme,
-)
-
-
-class WorkSpaceSerializer(BaseSerializer):
- owner = UserLiteSerializer(read_only=True)
- total_members = serializers.IntegerField(read_only=True)
- total_issues = serializers.IntegerField(read_only=True)
- class Meta:
- model = Workspace
- fields = "__all__"
- read_only_fields = [
- "id",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- "owner",
- ]
class WorkspaceLiteSerializer(BaseSerializer):
+ """Lite serializer with only required fields"""
class Meta:
model = Workspace
fields = [
@@ -41,91 +12,4 @@ class Meta:
"slug",
"id",
]
- read_only_fields = fields
-
-
-
-class WorkSpaceMemberSerializer(BaseSerializer):
- member = UserLiteSerializer(read_only=True)
- workspace = WorkspaceLiteSerializer(read_only=True)
-
- class Meta:
- model = WorkspaceMember
- fields = "__all__"
-
-
-class WorkspaceMemberAdminSerializer(BaseSerializer):
- member = UserAdminLiteSerializer(read_only=True)
- workspace = WorkspaceLiteSerializer(read_only=True)
-
- class Meta:
- model = WorkspaceMember
- fields = "__all__"
-
-
-class WorkSpaceMemberInviteSerializer(BaseSerializer):
- workspace = WorkSpaceSerializer(read_only=True)
- total_members = serializers.IntegerField(read_only=True)
- created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
-
- class Meta:
- model = WorkspaceMemberInvite
- fields = "__all__"
-
-
-class TeamSerializer(BaseSerializer):
- members_detail = UserLiteSerializer(read_only=True, source="members", many=True)
- members = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
- write_only=True,
- required=False,
- )
-
- class Meta:
- model = Team
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
- def create(self, validated_data, **kwargs):
- if "members" in validated_data:
- members = validated_data.pop("members")
- workspace = self.context["workspace"]
- team = Team.objects.create(**validated_data, workspace=workspace)
- team_members = [
- TeamMember(member=member, team=team, workspace=workspace)
- for member in members
- ]
- TeamMember.objects.bulk_create(team_members, batch_size=10)
- return team
- else:
- team = Team.objects.create(**validated_data)
- return team
-
- def update(self, instance, validated_data):
- if "members" in validated_data:
- members = validated_data.pop("members")
- TeamMember.objects.filter(team=instance).delete()
- team_members = [
- TeamMember(member=member, team=instance, workspace=instance.workspace)
- for member in members
- ]
- TeamMember.objects.bulk_create(team_members, batch_size=10)
- return super().update(instance, validated_data)
- else:
- return super().update(instance, validated_data)
-
-
-class WorkspaceThemeSerializer(BaseSerializer):
- class Meta:
- model = WorkspaceTheme
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "actor",
- ]
+ read_only_fields = fields
\ No newline at end of file
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
deleted file mode 100644
index 2213c0d9d11..00000000000
--- a/apiserver/plane/api/urls.py
+++ /dev/null
@@ -1,1749 +0,0 @@
-from django.urls import path
-
-
-# Create your urls here.
-
-from plane.api.views import (
- # Authentication
- SignUpEndpoint,
- SignInEndpoint,
- SignOutEndpoint,
- MagicSignInEndpoint,
- MagicSignInGenerateEndpoint,
- OauthEndpoint,
- ## End Authentication
- # Auth Extended
- ForgotPasswordEndpoint,
- VerifyEmailEndpoint,
- ResetPasswordEndpoint,
- RequestEmailVerificationEndpoint,
- ChangePasswordEndpoint,
- ## End Auth Extender
- # User
- UserEndpoint,
- UpdateUserOnBoardedEndpoint,
- UpdateUserTourCompletedEndpoint,
- UserActivityEndpoint,
- ## End User
- # Workspaces
- WorkSpaceViewSet,
- UserWorkspaceInvitationsEndpoint,
- UserWorkSpacesEndpoint,
- InviteWorkspaceEndpoint,
- JoinWorkspaceEndpoint,
- WorkSpaceMemberViewSet,
- WorkspaceMembersEndpoint,
- WorkspaceInvitationsViewset,
- UserWorkspaceInvitationsEndpoint,
- WorkspaceMemberUserEndpoint,
- WorkspaceMemberUserViewsEndpoint,
- WorkSpaceAvailabilityCheckEndpoint,
- TeamMemberViewSet,
- AddTeamToProjectEndpoint,
- UserLastProjectWithWorkspaceEndpoint,
- UserWorkspaceInvitationEndpoint,
- UserActivityGraphEndpoint,
- UserIssueCompletedGraphEndpoint,
- UserWorkspaceDashboardEndpoint,
- WorkspaceThemeViewSet,
- WorkspaceUserProfileStatsEndpoint,
- WorkspaceUserActivityEndpoint,
- WorkspaceUserProfileEndpoint,
- WorkspaceUserProfileIssuesEndpoint,
- WorkspaceLabelsEndpoint,
- LeaveWorkspaceEndpoint,
- ## End Workspaces
- # File Assets
- FileAssetEndpoint,
- UserAssetsEndpoint,
- ## End File Assets
- # Projects
- ProjectViewSet,
- InviteProjectEndpoint,
- ProjectMemberViewSet,
- ProjectMemberEndpoint,
- ProjectMemberInvitationsViewset,
- ProjectMemberUserEndpoint,
- AddMemberToProjectEndpoint,
- ProjectJoinEndpoint,
- UserProjectInvitationsViewset,
- ProjectIdentifierEndpoint,
- ProjectFavoritesViewSet,
- LeaveProjectEndpoint,
- ProjectPublicCoverImagesEndpoint,
- ## End Projects
- # Issues
- IssueViewSet,
- WorkSpaceIssuesEndpoint,
- IssueActivityEndpoint,
- IssueCommentViewSet,
- UserWorkSpaceIssues,
- BulkDeleteIssuesEndpoint,
- BulkImportIssuesEndpoint,
- ProjectUserViewsEndpoint,
- IssuePropertyViewSet,
- LabelViewSet,
- SubIssuesEndpoint,
- IssueLinkViewSet,
- BulkCreateIssueLabelsEndpoint,
- IssueAttachmentEndpoint,
- IssueArchiveViewSet,
- IssueSubscriberViewSet,
- IssueCommentPublicViewSet,
- IssueReactionViewSet,
- IssueRelationViewSet,
- CommentReactionViewSet,
- IssueDraftViewSet,
- ## End Issues
- # States
- StateViewSet,
- ## End States
- # Estimates
- ProjectEstimatePointEndpoint,
- BulkEstimatePointEndpoint,
- ## End Estimates
- # Views
- GlobalViewViewSet,
- GlobalViewIssuesViewSet,
- IssueViewViewSet,
- ViewIssuesEndpoint,
- IssueViewFavoriteViewSet,
- ## End Views
- # Cycles
- CycleViewSet,
- CycleIssueViewSet,
- CycleDateCheckEndpoint,
- CycleFavoriteViewSet,
- TransferCycleIssueEndpoint,
- ## End Cycles
- # Modules
- ModuleViewSet,
- ModuleIssueViewSet,
- ModuleFavoriteViewSet,
- ModuleLinkViewSet,
- BulkImportModulesEndpoint,
- ## End Modules
- # Pages
- PageViewSet,
- PageBlockViewSet,
- PageFavoriteViewSet,
- CreateIssueFromPageBlockEndpoint,
- ## End Pages
- # Api Tokens
- ApiTokenEndpoint,
- ## End Api Tokens
- # Integrations
- IntegrationViewSet,
- WorkspaceIntegrationViewSet,
- GithubRepositoriesEndpoint,
- GithubRepositorySyncViewSet,
- GithubIssueSyncViewSet,
- GithubCommentSyncViewSet,
- BulkCreateGithubIssueSyncEndpoint,
- SlackProjectSyncViewSet,
- ## End Integrations
- # Importer
- ServiceIssueImportSummaryEndpoint,
- ImportServiceEndpoint,
- UpdateServiceImportStatusEndpoint,
- ## End importer
- # Search
- GlobalSearchEndpoint,
- IssueSearchEndpoint,
- ## End Search
- # External
- GPTIntegrationEndpoint,
- ReleaseNotesEndpoint,
- UnsplashEndpoint,
- ## End External
- # Inbox
- InboxViewSet,
- InboxIssueViewSet,
- ## End Inbox
- # Analytics
- AnalyticsEndpoint,
- AnalyticViewViewset,
- SavedAnalyticEndpoint,
- ExportAnalyticsEndpoint,
- DefaultAnalyticsEndpoint,
- ## End Analytics
- # Notification
- NotificationViewSet,
- UnreadNotificationEndpoint,
- MarkAllReadNotificationViewSet,
- ## End Notification
- # Public Boards
- ProjectDeployBoardViewSet,
- ProjectIssuesPublicEndpoint,
- ProjectDeployBoardPublicSettingsEndpoint,
- IssueReactionPublicViewSet,
- CommentReactionPublicViewSet,
- InboxIssuePublicViewSet,
- IssueVotePublicViewSet,
- WorkspaceProjectDeployBoardEndpoint,
- IssueRetrievePublicEndpoint,
- ## End Public Boards
- ## Exporter
- ExportIssuesEndpoint,
- ## End Exporter
- # Configuration
- ConfigurationEndpoint,
- ## End Configuration
-)
-
-
-urlpatterns = [
- # Social Auth
- path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
- # Auth
- path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
- path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
- path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
- # Magic Sign In/Up
- path(
- "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
- ),
- path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
- # Email verification
- path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
- path(
- "request-email-verify/",
- RequestEmailVerificationEndpoint.as_view(),
- name="request-reset-email",
- ),
- # Password Manipulation
- path(
- "reset-password///",
- ResetPasswordEndpoint.as_view(),
- name="password-reset",
- ),
- path(
- "forgot-password/",
- ForgotPasswordEndpoint.as_view(),
- name="forgot-password",
- ),
- # User Profile
- path(
- "users/me/",
- UserEndpoint.as_view(
- {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
- ),
- name="users",
- ),
- path(
- "users/me/change-password/",
- ChangePasswordEndpoint.as_view(),
- name="change-password",
- ),
- path(
- "users/me/onboard/",
- UpdateUserOnBoardedEndpoint.as_view(),
- name="user-onboard",
- ),
- path(
- "users/me/tour-completed/",
- UpdateUserTourCompletedEndpoint.as_view(),
- name="user-tour",
- ),
- path(
- "users/workspaces//activities/",
- UserActivityEndpoint.as_view(),
- name="user-activities",
- ),
- # user workspaces
- path(
- "users/me/workspaces/",
- UserWorkSpacesEndpoint.as_view(),
- name="user-workspace",
- ),
- # user workspace invitations
- path(
- "users/me/invitations/workspaces/",
- UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}),
- name="user-workspace-invitations",
- ),
- # user workspace invitation
- path(
- "users/me/invitations//",
- UserWorkspaceInvitationEndpoint.as_view(
- {
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- # user join workspace
- # User Graphs
- path(
- "users/me/workspaces//activity-graph/",
- UserActivityGraphEndpoint.as_view(),
- name="user-activity-graph",
- ),
- path(
- "users/me/workspaces//issues-completed-graph/",
- UserIssueCompletedGraphEndpoint.as_view(),
- name="completed-graph",
- ),
- path(
- "users/me/workspaces//dashboard/",
- UserWorkspaceDashboardEndpoint.as_view(),
- name="user-workspace-dashboard",
- ),
- ## User Graph
- path(
- "users/me/invitations/workspaces///join/",
- JoinWorkspaceEndpoint.as_view(),
- name="user-join-workspace",
- ),
- # user project invitations
- path(
- "users/me/invitations/projects/",
- UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}),
- name="user-project-invitaions",
- ),
- ## Workspaces ##
- path(
- "workspace-slug-check/",
- WorkSpaceAvailabilityCheckEndpoint.as_view(),
- name="workspace-availability",
- ),
- path(
- "workspaces/",
- WorkSpaceViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//",
- WorkSpaceViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//invite/",
- InviteWorkspaceEndpoint.as_view(),
- name="workspace",
- ),
- path(
- "workspaces//invitations/",
- WorkspaceInvitationsViewset.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//invitations//",
- WorkspaceInvitationsViewset.as_view(
- {
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//members/",
- WorkSpaceMemberViewSet.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//members//",
- WorkSpaceMemberViewSet.as_view(
- {
- "patch": "partial_update",
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//workspace-members/",
- WorkspaceMembersEndpoint.as_view(),
- name="workspace-members",
- ),
- path(
- "workspaces//teams/",
- TeamMemberViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//teams//",
- TeamMemberViewSet.as_view(
- {
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "users/last-visited-workspace/",
- UserLastProjectWithWorkspaceEndpoint.as_view(),
- name="workspace-project-details",
- ),
- path(
- "workspaces//workspace-members/me/",
- WorkspaceMemberUserEndpoint.as_view(),
- name="workspace-member-details",
- ),
- path(
- "workspaces//workspace-views/",
- WorkspaceMemberUserViewsEndpoint.as_view(),
- name="workspace-member-details",
- ),
- path(
- "workspaces//workspace-themes/",
- WorkspaceThemeViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace-themes",
- ),
- path(
- "workspaces//workspace-themes//",
- WorkspaceThemeViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="workspace-themes",
- ),
- path(
- "workspaces//user-stats//",
- WorkspaceUserProfileStatsEndpoint.as_view(),
- name="workspace-user-stats",
- ),
- path(
- "workspaces//user-activity//",
- WorkspaceUserActivityEndpoint.as_view(),
- name="workspace-user-activity",
- ),
- path(
- "workspaces//user-profile//",
- WorkspaceUserProfileEndpoint.as_view(),
- name="workspace-user-profile-page",
- ),
- path(
- "workspaces//user-issues//",
- WorkspaceUserProfileIssuesEndpoint.as_view(),
- name="workspace-user-profile-issues",
- ),
- path(
- "workspaces//labels/",
- WorkspaceLabelsEndpoint.as_view(),
- name="workspace-labels",
- ),
- path(
- "workspaces//members/leave/",
- LeaveWorkspaceEndpoint.as_view(),
- name="workspace-labels",
- ),
- ## End Workspaces ##
- # Projects
- path(
- "workspaces//projects/",
- ProjectViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//",
- ProjectViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//project-identifiers/",
- ProjectIdentifierEndpoint.as_view(),
- name="project-identifiers",
- ),
- path(
- "workspaces//projects//invite/",
- InviteProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//members/",
- ProjectMemberViewSet.as_view({"get": "list"}),
- name="project",
- ),
- path(
- "workspaces//projects//members//",
- ProjectMemberViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//project-members/",
- ProjectMemberEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//members/add/",
- AddMemberToProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects/join/",
- ProjectJoinEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//team-invite/",
- AddTeamToProjectEndpoint.as_view(),
- name="projects",
- ),
- path(
- "workspaces//projects//invitations/",
- ProjectMemberInvitationsViewset.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//projects//invitations//",
- ProjectMemberInvitationsViewset.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//project-views/",
- ProjectUserViewsEndpoint.as_view(),
- name="project-view",
- ),
- path(
- "workspaces//projects//project-members/me/",
- ProjectMemberUserEndpoint.as_view(),
- name="project-view",
- ),
- path(
- "workspaces//user-favorite-projects/",
- ProjectFavoritesViewSet.as_view(
- {
- "post": "create",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//user-favorite-projects//",
- ProjectFavoritesViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//members/leave/",
- LeaveProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "project-covers/",
- ProjectPublicCoverImagesEndpoint.as_view(),
- name="project-covers",
- ),
- # End Projects
- # States
- path(
- "workspaces//projects//states/",
- StateViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-states",
- ),
- path(
- "workspaces//projects//states//",
- StateViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-state",
- ),
- # End States ##
- # Estimates
- path(
- "workspaces//projects//project-estimates/",
- ProjectEstimatePointEndpoint.as_view(),
- name="project-estimate-points",
- ),
- path(
- "workspaces//projects//estimates/",
- BulkEstimatePointEndpoint.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="bulk-create-estimate-points",
- ),
- path(
- "workspaces//projects//estimates//",
- BulkEstimatePointEndpoint.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="bulk-create-estimate-points",
- ),
- # End Estimates ##
- # Views
- path(
- "workspaces//projects//views/",
- IssueViewViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-view",
- ),
- path(
- "workspaces//projects//views//",
- IssueViewViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-view",
- ),
- path(
- "workspaces//projects//views//issues/",
- ViewIssuesEndpoint.as_view(),
- name="project-view-issues",
- ),
- path(
- "workspaces//views/",
- GlobalViewViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="global-view",
- ),
- path(
- "workspaces//views//",
- GlobalViewViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="global-view",
- ),
- path(
- "workspaces//issues/",
- GlobalViewIssuesViewSet.as_view(
- {
- "get": "list",
- }
- ),
- name="global-view-issues",
- ),
- path(
- "workspaces//projects//user-favorite-views/",
- IssueViewFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-view",
- ),
- path(
- "workspaces//projects//user-favorite-views//",
- IssueViewFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-view",
- ),
- ## End Views
- ## Cycles
- path(
- "workspaces//projects//cycles/",
- CycleViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//",
- CycleViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//cycle-issues/",
- CycleIssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//cycle-issues//",
- CycleIssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles/date-check/",
- CycleDateCheckEndpoint.as_view(),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//user-favorite-cycles/",
- CycleFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-cycle",
- ),
- path(
- "workspaces//projects//user-favorite-cycles//",
- CycleFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-cycle",
- ),
- path(
- "workspaces//projects//cycles//transfer-issues/",
- TransferCycleIssueEndpoint.as_view(),
- name="transfer-issues",
- ),
- ## End Cycles
- # Issue
- path(
- "workspaces//projects//issues/",
- IssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue",
- ),
- path(
- "workspaces//projects//issues//",
- IssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue",
- ),
- path(
- "workspaces//projects//issue-labels/",
- LabelViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-labels",
- ),
- path(
- "workspaces//projects//issue-labels//",
- LabelViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-labels",
- ),
- path(
- "workspaces//projects//bulk-create-labels/",
- BulkCreateIssueLabelsEndpoint.as_view(),
- name="project-bulk-labels",
- ),
- path(
- "workspaces//projects//bulk-delete-issues/",
- BulkDeleteIssuesEndpoint.as_view(),
- name="project-issues-bulk",
- ),
- path(
- "workspaces//projects//bulk-import-issues//",
- BulkImportIssuesEndpoint.as_view(),
- name="project-issues-bulk",
- ),
- path(
- "workspaces//my-issues/",
- UserWorkSpaceIssues.as_view(),
- name="workspace-issues",
- ),
- path(
- "workspaces//projects//issues//sub-issues/",
- SubIssuesEndpoint.as_view(),
- name="sub-issues",
- ),
- path(
- "workspaces//projects//issues//issue-links/",
- IssueLinkViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-links",
- ),
- path(
- "workspaces//projects//issues//issue-links//",
- IssueLinkViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-links",
- ),
- path(
- "workspaces//projects//issues//issue-attachments/",
- IssueAttachmentEndpoint.as_view(),
- name="project-issue-attachments",
- ),
- path(
- "workspaces//projects//issues//issue-attachments//",
- IssueAttachmentEndpoint.as_view(),
- name="project-issue-attachments",
- ),
- path(
- "workspaces//export-issues/",
- ExportIssuesEndpoint.as_view(),
- name="export-issues",
- ),
- ## End Issues
- ## Issue Activity
- path(
- "workspaces//projects//issues//history/",
- IssueActivityEndpoint.as_view(),
- name="project-issue-history",
- ),
- ## Issue Activity
- ## IssueComments
- path(
- "workspaces//projects//issues//comments/",
- IssueCommentViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-comment",
- ),
- path(
- "workspaces//projects//issues//comments//",
- IssueCommentViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-comment",
- ),
- ## End IssueComments
- # Issue Subscribers
- path(
- "workspaces//projects//issues//issue-subscribers/",
- IssueSubscriberViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-subscribers",
- ),
- path(
- "workspaces//projects//issues//issue-subscribers//",
- IssueSubscriberViewSet.as_view({"delete": "destroy"}),
- name="project-issue-subscribers",
- ),
- path(
- "workspaces//projects//issues//subscribe/",
- IssueSubscriberViewSet.as_view(
- {
- "get": "subscription_status",
- "post": "subscribe",
- "delete": "unsubscribe",
- }
- ),
- name="project-issue-subscribers",
- ),
- ## End Issue Subscribers
- # Issue Reactions
- path(
- "workspaces//projects//issues//reactions/",
- IssueReactionViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-reactions",
- ),
- path(
- "workspaces//projects//issues//reactions//",
- IssueReactionViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="project-issue-reactions",
- ),
- ## End Issue Reactions
- # Comment Reactions
- path(
- "workspaces//projects//comments//reactions/",
- CommentReactionViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-comment-reactions",
- ),
- path(
- "workspaces//projects//comments//reactions//",
- CommentReactionViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="project-issue-comment-reactions",
- ),
- ## End Comment Reactions
- ## IssueProperty
- path(
- "workspaces//projects//issue-properties/",
- IssuePropertyViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-roadmap",
- ),
- path(
- "workspaces//projects//issue-properties//",
- IssuePropertyViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-roadmap",
- ),
- ## IssueProperty Ebd
- ## Issue Archives
- path(
- "workspaces//projects//archived-issues/",
- IssueArchiveViewSet.as_view(
- {
- "get": "list",
- }
- ),
- name="project-issue-archive",
- ),
- path(
- "workspaces//projects//archived-issues//",
- IssueArchiveViewSet.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- name="project-issue-archive",
- ),
- path(
- "workspaces//projects//unarchive//",
- IssueArchiveViewSet.as_view(
- {
- "post": "unarchive",
- }
- ),
- name="project-issue-archive",
- ),
- ## End Issue Archives
- ## Issue Relation
- path(
- "workspaces//projects//issues//issue-relation/",
- IssueRelationViewSet.as_view(
- {
- "post": "create",
- }
- ),
- name="issue-relation",
- ),
- path(
- "workspaces//projects//issues//issue-relation//",
- IssueRelationViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="issue-relation",
- ),
- ## End Issue Relation
- ## Issue Drafts
- path(
- "workspaces//projects//issue-drafts/",
- IssueDraftViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-draft",
- ),
- path(
- "workspaces//projects//issue-drafts//",
- IssueDraftViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-draft",
- ),
- ## End Issue Drafts
- ## File Assets
- path(
- "workspaces//file-assets/",
- FileAssetEndpoint.as_view(),
- name="file-assets",
- ),
- path(
- "workspaces/file-assets///",
- FileAssetEndpoint.as_view(),
- name="file-assets",
- ),
- path(
- "users/file-assets/",
- UserAssetsEndpoint.as_view(),
- name="user-file-assets",
- ),
- path(
- "users/file-assets//",
- UserAssetsEndpoint.as_view(),
- name="user-file-assets",
- ),
- ## End File Assets
- ## Modules
- path(
- "workspaces//projects//modules/",
- ModuleViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-modules",
- ),
- path(
- "workspaces//projects//modules//",
- ModuleViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-modules",
- ),
- path(
- "workspaces//projects//modules//module-issues/",
- ModuleIssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-module-issues",
- ),
- path(
- "workspaces//projects//modules//module-issues//",
- ModuleIssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-module-issues",
- ),
- path(
- "workspaces//projects//modules//module-links/",
- ModuleLinkViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-module-links",
- ),
- path(
- "workspaces//projects//modules//module-links//",
- ModuleLinkViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-module-links",
- ),
- path(
- "workspaces//projects//user-favorite-modules/",
- ModuleFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-module",
- ),
- path(
- "workspaces//projects//user-favorite-modules//",
- ModuleFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-module",
- ),
- path(
- "workspaces//projects//bulk-import-modules//",
- BulkImportModulesEndpoint.as_view(),
- name="bulk-modules-create",
- ),
- ## End Modules
- # Pages
- path(
- "workspaces//projects//pages/",
- PageViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-pages",
- ),
- path(
- "workspaces//projects//pages/