diff --git a/.deepsource.toml b/.deepsource.toml
new file mode 100644
index 00000000000..85de1a5e8d4
--- /dev/null
+++ b/.deepsource.toml
@@ -0,0 +1,17 @@
+version = 1
+
+[[analyzers]]
+name = "shell"
+
+[[analyzers]]
+name = "javascript"
+
+ [analyzers.meta]
+ plugins = ["react"]
+ environment = ["nodejs"]
+
+[[analyzers]]
+name = "python"
+
+ [analyzers.meta]
+ runtime_version = "3.x.x"
\ No newline at end of file
diff --git a/.env.example b/.env.example
index 1d95c56a067..082aa753b80 100644
--- a/.env.example
+++ b/.env.example
@@ -1,36 +1,3 @@
-# Frontend
-# Extra image domains that need to be added for Next Image
-NEXT_PUBLIC_EXTRA_IMAGE_DOMAINS=
-# Google Client ID for Google OAuth
-NEXT_PUBLIC_GOOGLE_CLIENTID=""
-# Github ID for Github OAuth
-NEXT_PUBLIC_GITHUB_ID=""
-# Github App Name for GitHub Integration
-NEXT_PUBLIC_GITHUB_APP_NAME=""
-# Sentry DSN for error monitoring
-NEXT_PUBLIC_SENTRY_DSN=""
-# Enable/Disable OAUTH - default 0 for selfhosted instance
-NEXT_PUBLIC_ENABLE_OAUTH=0
-# Enable/Disable sentry
-NEXT_PUBLIC_ENABLE_SENTRY=0
-# Enable/Disable session recording
-NEXT_PUBLIC_ENABLE_SESSION_RECORDER=0
-# Enable/Disable event tracking
-NEXT_PUBLIC_TRACK_EVENTS=0
-# Slack for Slack Integration
-NEXT_PUBLIC_SLACK_CLIENT_ID=""
-# For Telemetry, set it to "app.plane.so"
-NEXT_PUBLIC_PLAUSIBLE_DOMAIN=""
-# public boards deploy url
-NEXT_PUBLIC_DEPLOY_URL=""
-
-# Backend
-# Debug value for api server use it as 0 for production use
-DEBUG=0
-
-# Error logs
-SENTRY_DSN=""
-
# Database Settings
PGUSER="plane"
PGPASSWORD="plane"
@@ -43,15 +10,6 @@ REDIS_HOST="plane-redis"
REDIS_PORT="6379"
REDIS_URL="redis://${REDIS_HOST}:6379/"
-# Email Settings
-EMAIL_HOST=""
-EMAIL_HOST_USER=""
-EMAIL_HOST_PASSWORD=""
-EMAIL_PORT=587
-EMAIL_FROM="Team Plane "
-EMAIL_USE_TLS="1"
-EMAIL_USE_SSL="0"
-
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
@@ -67,9 +25,6 @@ OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
OPENAI_API_KEY="sk-" # add your openai key here
GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
-# Github
-GITHUB_CLIENT_SECRET="" # For fetching release notes
-
# Settings related to Docker
DOCKERIZED=1
# set to 1 If using the pre-configured minio setup
@@ -78,10 +33,3 @@ USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
-# Default Creds
-DEFAULT_EMAIL="captain@plane.so"
-DEFAULT_PASSWORD="password123"
-
-# SignUps
-ENABLE_SIGNUP="1"
-# Auto generated and Required that will be generated from setup.sh
diff --git a/.eslintrc.js b/.eslintrc.js
index 463c86901c0..c229c095269 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -4,7 +4,7 @@ module.exports = {
extends: ["custom"],
settings: {
next: {
- rootDir: ["apps/*"],
+ rootDir: ["web/", "space/"],
},
},
};
diff --git a/.github/workflows/Build_Test_Pull_Request.yml b/.github/workflows/Build_Test_Pull_Request.yml
deleted file mode 100644
index 0dbca646a88..00000000000
--- a/.github/workflows/Build_Test_Pull_Request.yml
+++ /dev/null
@@ -1,55 +0,0 @@
-name: Build Pull Request Contents
-
-on:
- pull_request:
- types: ["opened", "synchronize"]
-
-jobs:
- build-pull-request-contents:
- name: Build Pull Request Contents
- runs-on: ubuntu-20.04
- permissions:
- pull-requests: read
-
- steps:
- - name: Checkout Repository to Actions
- uses: actions/checkout@v3.3.0
-
- - name: Setup Node.js 18.x
- uses: actions/setup-node@v2
- with:
- node-version: 18.x
- cache: 'yarn'
-
- - name: Get changed files
- id: changed-files
- uses: tj-actions/changed-files@v38
- with:
- files_yaml: |
- apiserver:
- - apiserver/**
- web:
- - apps/app/**
- deploy:
- - apps/space/**
-
- - name: Setup .npmrc for repository
- run: |
- echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc
-
- - name: Build Plane's Main App
- if: steps.changed-files.outputs.web_any_changed == 'true'
- run: |
- mv ./.npmrc ./apps/app
- cd apps/app
- yarn
- yarn build
-
- - name: Build Plane's Deploy App
- if: steps.changed-files.outputs.deploy_any_changed == 'true'
- run: |
- cd apps/space
- yarn
- yarn build
-
-
diff --git a/.github/workflows/Update_Docker_Images.yml b/.github/workflows/Update_Docker_Images.yml
deleted file mode 100644
index 8e27e098f98..00000000000
--- a/.github/workflows/Update_Docker_Images.yml
+++ /dev/null
@@ -1,111 +0,0 @@
-name: Update Docker Images for Plane on Release
-
-on:
- release:
- types: [released]
-
-jobs:
- build_push_backend:
- name: Build and Push Api Server Docker Image
- runs-on: ubuntu-20.04
-
- steps:
- - name: Check out the repo
- uses: actions/checkout@v3.3.0
-
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2.5.0
-
- - name: Login to Docker Hub
- uses: docker/login-action@v2.1.0
- with:
- username: ${{ secrets.DOCKERHUB_USERNAME }}
- password: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Setup .npmrc for repository
- run: |
- echo -e "@tiptap-pro:registry=https://registry.tiptap.dev/\n//registry.tiptap.dev/:_authToken=${{ secrets.TIPTAP_TOKEN }}" > .npmrc
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaFrontend
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend
- tags: |
- type=ref,event=tag
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaBackend
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend
- tags: |
- type=ref,event=tag
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaDeploy
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-deploy
- tags: |
- type=ref,event=tag
-
- - name: Extract metadata (tags, labels) for Docker (Docker Hub) from Github Release
- id: metaProxy
- uses: docker/metadata-action@v4.3.0
- with:
- images: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy
- tags: |
- type=ref,event=tag
-
- - name: Build and Push Frontend to Docker Container Registry
- uses: docker/build-push-action@v4.0.0
- with:
- context: .
- file: ./apps/app/Dockerfile.web
- platforms: linux/amd64
- tags: ${{ steps.metaFrontend.outputs.tags }}
- push: true
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and Push Backend to Docker Hub
- uses: docker/build-push-action@v4.0.0
- with:
- context: ./apiserver
- file: ./apiserver/Dockerfile.api
- platforms: linux/amd64
- push: true
- tags: ${{ steps.metaBackend.outputs.tags }}
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and Push Plane-Deploy to Docker Hub
- uses: docker/build-push-action@v4.0.0
- with:
- context: .
- file: ./apps/space/Dockerfile.space
- platforms: linux/amd64
- push: true
- tags: ${{ steps.metaDeploy.outputs.tags }}
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-
- - name: Build and Push Plane-Proxy to Docker Hub
- uses: docker/build-push-action@v4.0.0
- with:
- context: ./nginx
- file: ./nginx/Dockerfile
- platforms: linux/amd64
- push: true
- tags: ${{ steps.metaProxy.outputs.tags }}
- env:
- DOCKER_BUILDKIT: 1
- DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
- DOCKET_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
new file mode 100644
index 00000000000..26b8addd23b
--- /dev/null
+++ b/.github/workflows/build-branch.yml
@@ -0,0 +1,205 @@
+
+name: Docker Branch Build
+
+on:
+ workflow_dispatch:
+ inputs:
+ logLevel:
+ description: 'Log level'
+ required: true
+ default: 'warning'
+ tags:
+ description: 'Dev/QA Builds'
+
+env:
+ gh_branch: ${{ github.ref_name }}
+ img_tag: latest
+
+jobs:
+ branch_build_and_push:
+ name: Build-Push Web/Space/API/Proxy Docker Image
+ runs-on: ubuntu-20.04
+
+ steps:
+ - name: Check out the repo
+ uses: actions/checkout@v3.3.0
+
+ - uses: ASzc/change-string-case-action@v2
+ id: gh_branch_upper_lower
+ with:
+ string: ${{ env.gh_branch }}
+
+ - uses: mad9000/actions-find-and-replace-string@2
+ id: gh_branch_replace_slash
+ with:
+ source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
+ find: '/'
+ replace: '-'
+
+ - uses: mad9000/actions-find-and-replace-string@2
+ id: gh_branch_replace_dot
+ with:
+ source: ${{ steps.gh_branch_replace_slash.outputs.value }}
+ find: '.'
+ replace: ''
+
+ - uses: mad9000/actions-find-and-replace-string@2
+ id: gh_branch_clean
+ with:
+ source: ${{ steps.gh_branch_replace_dot.outputs.value }}
+ find: '_'
+ replace: ''
+ - name: Uploading Proxy Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: proxy-src-code
+ path: ./nginx
+ - name: Uploading Backend Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: backend-src-code
+ path: ./apiserver
+ - name: Uploading Web Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: web-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./space
+
+ - name: Uploading Space Source
+ uses: actions/upload-artifact@v3
+ with:
+ name: space-src-code
+ path: |
+ ./
+ !./apiserver
+ !./nginx
+ !./deploy
+ !./web
+ outputs:
+ gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
+
+ branch_build_push_frontend:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Web Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: web-src-code
+
+ - name: Build and Push Frontend to Docker Container Registry
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./web/Dockerfile.web
+ platforms: linux/amd64
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_space:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Space Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: space-src-code
+
+ - name: Build and Push Space to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./space/Dockerfile.space
+ platforms: linux/amd64
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_backend:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+ - name: Downloading Backend Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: backend-src-code
+
+ - name: Build and Push Backend to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile.api
+ platforms: linux/amd64
+ push: true
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ branch_build_push_proxy:
+ runs-on: ubuntu-20.04
+ needs: [ branch_build_and_push ]
+ steps:
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v2.5.0
+
+ - name: Login to Docker Hub
+ uses: docker/login-action@v2.1.0
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Downloading Proxy Source Code
+ uses: actions/download-artifact@v3
+ with:
+ name: proxy-src-code
+
+ - name: Build and Push Plane-Proxy to Docker Hub
+ uses: docker/build-push-action@v4.0.0
+ with:
+ context: .
+ file: ./Dockerfile
+ platforms: linux/amd64
+ tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
+ push: true
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+          DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
\ No newline at end of file
diff --git a/.github/workflows/build-test-pull-request.yml b/.github/workflows/build-test-pull-request.yml
new file mode 100644
index 00000000000..c74975f48ef
--- /dev/null
+++ b/.github/workflows/build-test-pull-request.yml
@@ -0,0 +1,48 @@
+name: Build Pull Request Contents
+
+on:
+ pull_request:
+ types: ["opened", "synchronize"]
+
+jobs:
+ build-pull-request-contents:
+ name: Build Pull Request Contents
+ runs-on: ubuntu-20.04
+ permissions:
+ pull-requests: read
+
+ steps:
+ - name: Checkout Repository to Actions
+ uses: actions/checkout@v3.3.0
+
+ - name: Setup Node.js 18.x
+ uses: actions/setup-node@v2
+ with:
+ node-version: 18.x
+ cache: 'yarn'
+
+ - name: Get changed files
+ id: changed-files
+ uses: tj-actions/changed-files@v38
+ with:
+ files_yaml: |
+ apiserver:
+ - apiserver/**
+ web:
+ - web/**
+ deploy:
+ - space/**
+
+ - name: Build Plane's Main App
+ if: steps.changed-files.outputs.web_any_changed == 'true'
+ run: |
+ yarn
+ yarn build --filter=web
+
+ - name: Build Plane's Deploy App
+ if: steps.changed-files.outputs.deploy_any_changed == 'true'
+ run: |
+ yarn
+ yarn build --filter=space
+
+
diff --git a/.github/workflows/create-sync-pr.yml b/.github/workflows/create-sync-pr.yml
new file mode 100644
index 00000000000..c8e27f32216
--- /dev/null
+++ b/.github/workflows/create-sync-pr.yml
@@ -0,0 +1,79 @@
+name: Create PR in Plane EE Repository to sync the changes
+
+on:
+ pull_request:
+ branches:
+ - master
+ types:
+ - closed
+
+jobs:
+ create_pr:
+ # Only run the job when a PR is merged
+ if: github.event.pull_request.merged == true
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ contents: read
+ steps:
+ - name: Check SOURCE_REPO
+ id: check_repo
+ env:
+ SOURCE_REPO: ${{ secrets.SOURCE_REPO_NAME }}
+ run: |
+          echo "is_correct_repo=$(if [[ "$SOURCE_REPO" == "makeplane/plane" ]]; then echo 'true'; else echo 'false'; fi)" >> $GITHUB_OUTPUT
+
+ - name: Checkout Code
+ if: steps.check_repo.outputs.is_correct_repo == 'true'
+ uses: actions/checkout@v2
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+
+ - name: Set up Branch Name
+ if: steps.check_repo.outputs.is_correct_repo == 'true'
+ run: |
+ echo "SOURCE_BRANCH_NAME=${{ github.head_ref }}" >> $GITHUB_ENV
+
+ - name: Setup GH CLI
+ if: steps.check_repo.outputs.is_correct_repo == 'true'
+ run: |
+ type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
+ curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
+ sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
+ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
+ sudo apt update
+ sudo apt install gh -y
+
+ - name: Create Pull Request
+ if: steps.check_repo.outputs.is_correct_repo == 'true'
+ env:
+ GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
+ run: |
+ TARGET_REPO="${{ secrets.TARGET_REPO_NAME }}"
+ TARGET_BRANCH="${{ secrets.TARGET_REPO_BRANCH }}"
+ SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
+
+ git checkout $SOURCE_BRANCH
+ git remote add target "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
+ git push target $SOURCE_BRANCH:$SOURCE_BRANCH
+
+ PR_TITLE="${{ github.event.pull_request.title }}"
+ PR_BODY="${{ github.event.pull_request.body }}"
+
+ # Remove double quotes
+ PR_TITLE_CLEANED="${PR_TITLE//\"/}"
+ PR_BODY_CLEANED="${PR_BODY//\"/}"
+
+ # Construct PR_BODY_CONTENT using a here-document
+ PR_BODY_CONTENT=$(cat <> ./web/.env
+```
+
+```bash
+echo "\nNEXT_PUBLIC_API_BASE_URL=http://localhost\n" >> ./space/.env
+```
+
+4. Run Docker compose up
+
+```bash
+docker compose up -d
+```
+
+5. Install dependencies
+
+```bash
+yarn install
+```
+
+6. Run the web app in development mode
+
+```bash
+yarn dev
+```
+
## Missing a Feature?
If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
@@ -39,8 +81,8 @@ If you would like to _implement_ it, an issue with your proposal must be submitt
To ensure consistency throughout the source code, please keep these rules in mind as you are working:
-- All features or bug fixes must be tested by one or more specs (unit-tests).
-- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
+- All features or bug fixes must be tested by one or more specs (unit-tests).
+- We use [Eslint default rule guide](https://eslint.org/docs/rules/), with minor changes. An automated formatter is available using prettier.
## Need help? Questions and suggestions
@@ -48,11 +90,11 @@ Questions, suggestions, and thoughts are most welcome. We can also be reached in
## Ways to contribute
-- Try Plane Cloud and the self hosting platform and give feedback
-- Add new integrations
-- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
-- Share your thoughts and suggestions with us
-- Help create tutorials and blog posts
-- Request a feature by submitting a proposal
-- Report a bug
-- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
+- Try Plane Cloud and the self hosting platform and give feedback
+- Add new integrations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, examples or explanations.
diff --git a/Dockerfile b/Dockerfile
index 1b059b5e043..1a71801f63b 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -52,7 +52,7 @@ ENV DJANGO_SETTINGS_MODULE plane.settings.production
ENV DOCKERIZED 1
WORKDIR /code
-
+RUN sed -i 's/dl-cdn.alpinelinux.org/mirror.tuna.tsinghua.edu.cn/g' /etc/apk/repositories
RUN apk --no-cache add \
"libpq~=15" \
"libxslt~=1.1" \
diff --git a/ENV_SETUP.md b/ENV_SETUP.md
new file mode 100644
index 00000000000..6796c3db6b1
--- /dev/null
+++ b/ENV_SETUP.md
@@ -0,0 +1,134 @@
+# Environment Variables
+
+Environment variables are distributed in various files. Please refer to them carefully.
+
+## {PROJECT_FOLDER}/.env
+File is available in the project root folder
+
+```
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+```
+
+## {PROJECT_FOLDER}/web/.env.example
+
+```
+# Enable/Disable OAUTH - default 0 for selfhosted instance
+NEXT_PUBLIC_ENABLE_OAUTH=0
+# Public boards deploy URL
+NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
+```
+
+## {PROJECT_FOLDER}/space/.env.example
+
+```
+# Flag to toggle OAuth
+NEXT_PUBLIC_ENABLE_OAUTH=0
+```
+
+## {PROJECT_FOLDER}/apiserver/.env
+
+```
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+DJANGO_SETTINGS_MODULE="plane.settings.selfhosted"
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+# Email Redirection URL
+WEB_URL="http://localhost"
+```
+## Updates
+- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
+- The naming convention for containers and images has been updated.
+- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.
+- The Tiptap pro-extension dependency has been removed, eliminating the need for Tiptap API keys.
+- The image name for Plane deployment has been changed to plane-space.
diff --git a/README.md b/README.md
index 2bc2764f3bc..53679943ba3 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
Plane
-Open-source, self-hosted project planning tool
+Flexible, extensible open-source project management
@@ -35,61 +35,51 @@
Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind 🧘♀️.
-
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account. Plane Cloud offers a hosted solution for Plane. If you prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/self-hosting).
+## ⚡️ Contributors Quick Start
-## ⚡️ Quick start with Docker Compose
+### Prerequisite
-### Docker Compose Setup
+Your development system must have Docker Engine installed and running.
-- Clone the repository
+### Steps
-```bash
-git clone https://github.com/makeplane/plane
-cd plane
-chmod +x setup.sh
-```
+Setting up the local environment is extremely easy and straightforward. Follow the steps below and you will be ready to contribute.
-- Run setup.sh
+1. Clone the code locally using `git clone https://github.com/makeplane/plane.git`
+1. Switch to the code folder `cd plane`
+1. Create your feature or fix branch you plan to work on using `git checkout -b <feature-branch-name>`
+1. Open terminal and run `./setup.sh`
+1. Open the code on VSCode or similar equivalent IDE
+1. Review the `.env` files available in various folders. Visit [Environment Setup](./ENV_SETUP.md) to learn about the various environment variables used in the system
+1. Run the docker command to initiate various services `docker compose -f docker-compose-local.yml up -d`
```bash
-./setup.sh http://localhost
+./setup.sh
```
-> If running in a cloud env replace localhost with public facing IP address of the VM
+You are ready to make changes to the code. Do not forget to refresh the browser (in case it does not auto-reload).
-- Setup Tiptap Pro
+That's it!
- Visit [Tiptap Pro](https://collab.tiptap.dev/pro-extensions) and signup (it is free).
+## 🍙 Self Hosting
- Create a **`.npmrc`** file, copy the following and replace your registry token generated from Tiptap Pro.
-
-```
-@tiptap-pro:registry=https://registry.tiptap.dev/
-//registry.tiptap.dev/:_authToken=YOUR_REGISTRY_TOKEN
-```
-- Run Docker compose up
-
-```bash
-docker compose up -d
-```
-
-You can use the default email and password for your first login `captain@plane.so` and `password123`.
+For self hosting environment setup, visit the [Self Hosting](https://docs.plane.so/self-hosting) documentation page
## 🚀 Features
-* **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
-* **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
-* **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
-* **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
-* **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
-* **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
-* **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
-* **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
-* **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
+- **Issue Planning and Tracking**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to issues for better organization and tracking.
+- **Issue Attachments**: Collaborate effectively by attaching files to issues, making it easy for your team to find and share important project-related documents.
+- **Layouts**: Customize your project view with your preferred layout - choose from List, Kanban, or Calendar to visualize your project in a way that makes sense to you.
+- **Cycles**: Plan sprints with Cycles to keep your team on track and productive. Gain insights into your project's progress with burn-down charts and other useful features.
+- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to easily track and plan your project's progress.
+- **Views**: Create custom filters to display only the issues that matter to you. Save and share your filters in just a few clicks.
+- **Pages**: Plane pages function as an AI-powered notepad, allowing you to easily document issues, cycle plans, and module details, and then synchronize them with your issues.
+- **Command K**: Enjoy a better user experience with the new Command + K menu. Easily manage and navigate through your projects from one convenient location.
+- **GitHub Sync**: Streamline your planning process by syncing your GitHub issues with Plane. Keep all your issues in one place for better tracking and collaboration.
## 📸 Screenshots
@@ -150,7 +140,6 @@ docker compose up -d
-
## 📚Documentation
For full documentation, visit [docs.plane.so](https://docs.plane.so/)
diff --git a/apiserver/.env.example b/apiserver/.env.example
new file mode 100644
index 00000000000..8193b5e7716
--- /dev/null
+++ b/apiserver/.env.example
@@ -0,0 +1,72 @@
+# Backend
+# Debug value for api server use it as 0 for production use
+DEBUG=0
+DJANGO_SETTINGS_MODULE="plane.settings.production"
+
+# Error logs
+SENTRY_DSN=""
+
+# Database Settings
+PGUSER="plane"
+PGPASSWORD="plane"
+PGHOST="plane-db"
+PGDATABASE="plane"
+DATABASE_URL=postgresql://${PGUSER}:${PGPASSWORD}@${PGHOST}/${PGDATABASE}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:6379/"
+
+# Email Settings
+EMAIL_HOST=""
+EMAIL_HOST_USER=""
+EMAIL_HOST_PASSWORD=""
+EMAIL_PORT=587
+EMAIL_FROM="Team Plane "
+EMAIL_USE_TLS="1"
+EMAIL_USE_SSL="0"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires change in the nginx.conf for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # change if using a custom endpoint
+OPENAI_API_KEY="sk-" # add your openai key here
+GPT_ENGINE="gpt-3.5-turbo" # use "gpt-4" if you have access
+
+# Github
+GITHUB_CLIENT_SECRET="" # For fetching release notes
+
+# Settings related to Docker
+DOCKERIZED=1
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=1
+
+# Nginx Configuration
+NGINX_PORT=80
+
+# Default Creds
+DEFAULT_EMAIL="captain@plane.so"
+DEFAULT_PASSWORD="password123"
+
+# SignUps
+ENABLE_SIGNUP="1"
+
+
+# Enable Email/Password Signup
+ENABLE_EMAIL_PASSWORD="1"
+
+# Enable Magic link Login
+ENABLE_MAGIC_LINK_LOGIN="0"
+
+# Email redirections and minio domain settings
+WEB_URL="http://localhost"
+
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 7da5f9ddaf1..cd1ee966768 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -8,7 +8,6 @@ ENV PIP_DISABLE_PIP_VERSION_CHECK=1
WORKDIR /code
RUN sed -i 's/dl-cdn.alpinelinux.org/mirror.tuna.tsinghua.edu.cn/g' /etc/apk/repositories
-
RUN apk --no-cache add \
"libpq~=15" \
"libxslt~=1.1" \
diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev
new file mode 100644
index 00000000000..cdfd2d50dcb
--- /dev/null
+++ b/apiserver/Dockerfile.dev
@@ -0,0 +1,51 @@
+FROM python:3.11.1-alpine3.17 AS backend
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+RUN sed -i 's/dl-cdn.alpinelinux.org/mirror.tuna.tsinghua.edu.cn/g' /etc/apk/repositories
+RUN apk --no-cache add \
+ "bash~=5.2" \
+ "libpq~=15" \
+ "libxslt~=1.1" \
+ "nodejs-current~=19" \
+ "xmlsec~=1.2" \
+ "libffi-dev" \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
+ "git~=2" \
+ "make~=4.3" \
+ "postgresql13-dev~=13" \
+ "libc-dev" \
+ "linux-headers"
+
+WORKDIR /code
+
+COPY requirements.txt ./requirements.txt
+ADD requirements ./requirements
+
+RUN pip install -i https://pypi.tuna.tsinghua.edu.cn/simple --trusted-host pypi.tuna.tsinghua.edu.cn -r requirements.txt --compile --no-cache-dir
+RUN addgroup -S plane && \
+ adduser -S captain -G plane
+
+RUN chown captain.plane /code
+
+USER captain
+
+# Add in Django deps and generate Django's static files
+
+USER root
+
+# RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
+RUN chmod -R 777 /code
+
+USER captain
+
+# Expose container port and run entry point script
+EXPOSE 8000
+
+# CMD [ "./bin/takeoff" ]
+
diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py
index e115b20b8c0..a356f2ec92b 100644
--- a/apiserver/bin/user_script.py
+++ b/apiserver/bin/user_script.py
@@ -1,4 +1,4 @@
-import os, sys, random, string
+import os, sys
import uuid
sys.path.append("/code")
diff --git a/apiserver/gunicorn.config.py b/apiserver/gunicorn.config.py
index 67205b5ec94..51c2a548871 100644
--- a/apiserver/gunicorn.config.py
+++ b/apiserver/gunicorn.config.py
@@ -3,4 +3,4 @@
def post_fork(server, worker):
patch_psycopg()
- worker.log.info("Made Psycopg2 Green")
\ No newline at end of file
+ worker.log.info("Made Psycopg2 Green")
diff --git a/apiserver/plane/api/permissions/project.py b/apiserver/plane/api/permissions/project.py
index e4e3e0f9bc3..4f907dbd6fa 100644
--- a/apiserver/plane/api/permissions/project.py
+++ b/apiserver/plane/api/permissions/project.py
@@ -101,4 +101,4 @@ def has_permission(self, request, view):
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
- ).exists()
\ No newline at end of file
+ ).exists()
diff --git a/apiserver/plane/api/permissions/workspace.py b/apiserver/plane/api/permissions/workspace.py
index d01b545ee18..66e8366146c 100644
--- a/apiserver/plane/api/permissions/workspace.py
+++ b/apiserver/plane/api/permissions/workspace.py
@@ -58,8 +58,17 @@ def has_permission(self, request, view):
if request.user.is_anonymous:
return False
+ ## Safe Methods -> Handle the filtering logic in queryset
+ if request.method in SAFE_METHODS:
+ return WorkspaceMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ ).exists()
+
return WorkspaceMember.objects.filter(
- member=request.user, workspace__slug=view.workspace_slug
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ role__in=[Owner, Admin],
).exists()
diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py
index 2dc910cafcd..f1a7de3b81c 100644
--- a/apiserver/plane/api/serializers/__init__.py
+++ b/apiserver/plane/api/serializers/__init__.py
@@ -1,5 +1,13 @@
from .base import BaseSerializer
-from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer
+from .user import (
+ UserSerializer,
+ UserLiteSerializer,
+ ChangePasswordSerializer,
+ ResetPasswordSerializer,
+ UserAdminLiteSerializer,
+ UserMeSerializer,
+ UserMeSettingsSerializer,
+)
from .workspace import (
WorkSpaceSerializer,
WorkSpaceMemberSerializer,
@@ -8,9 +16,11 @@
WorkspaceLiteSerializer,
WorkspaceThemeSerializer,
WorkspaceMemberAdminSerializer,
+ WorkspaceMemberMeSerializer,
)
from .project import (
ProjectSerializer,
+ ProjectListSerializer,
ProjectDetailSerializer,
ProjectMemberSerializer,
ProjectMemberInviteSerializer,
@@ -20,19 +30,22 @@
ProjectMemberLiteSerializer,
ProjectDeployBoardSerializer,
ProjectMemberAdminSerializer,
- ProjectPublicMemberSerializer
+ ProjectPublicMemberSerializer,
)
from .state import StateSerializer, StateLiteSerializer
-from .view import IssueViewSerializer, IssueViewFavoriteSerializer
-from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer
+from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer
+from .cycle import (
+ CycleSerializer,
+ CycleIssueSerializer,
+ CycleFavoriteSerializer,
+ CycleWriteSerializer,
+)
from .asset import FileAssetSerializer
from .issue import (
IssueCreateSerializer,
IssueActivitySerializer,
IssueCommentSerializer,
IssuePropertySerializer,
- BlockerIssueSerializer,
- BlockedIssueSerializer,
IssueAssigneeSerializer,
LabelSerializer,
IssueSerializer,
@@ -45,6 +58,8 @@
IssueReactionSerializer,
CommentReactionSerializer,
IssueVoteSerializer,
+ IssueRelationSerializer,
+ RelatedIssueSerializer,
IssuePublicSerializer,
)
diff --git a/apiserver/plane/api/serializers/analytic.py b/apiserver/plane/api/serializers/analytic.py
index 5f35e111787..9f3ee6d0a24 100644
--- a/apiserver/plane/api/serializers/analytic.py
+++ b/apiserver/plane/api/serializers/analytic.py
@@ -17,7 +17,7 @@ def create(self, validated_data):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
return AnalyticView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -25,6 +25,6 @@ def update(self, instance, validated_data):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py
index 0c6bba46823..89c9725d951 100644
--- a/apiserver/plane/api/serializers/base.py
+++ b/apiserver/plane/api/serializers/base.py
@@ -3,3 +3,56 @@
class BaseSerializer(serializers.ModelSerializer):
id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+class DynamicBaseSerializer(BaseSerializer):
+
+ def __init__(self, *args, **kwargs):
+ # If 'fields' is provided in the arguments, remove it and store it separately.
+ # This is done so as not to pass this custom argument up to the superclass.
+ fields = kwargs.pop("fields", None)
+
+ # Call the initialization of the superclass.
+ super().__init__(*args, **kwargs)
+
+ # If 'fields' was provided, filter the fields of the serializer accordingly.
+ if fields is not None:
+ self.fields = self._filter_fields(fields)
+
+ def _filter_fields(self, fields):
+ """
+ Adjust the serializer's fields based on the provided 'fields' list.
+
+ :param fields: List or dictionary specifying which fields to include in the serializer.
+ :return: The updated fields for the serializer.
+ """
+ # Check each field_name in the provided fields.
+ for field_name in fields:
+ # If the field is a dictionary (indicating nested fields),
+ # loop through its keys and values.
+ if isinstance(field_name, dict):
+ for key, value in field_name.items():
+ # If the value of this nested field is a list,
+ # perform a recursive filter on it.
+ if isinstance(value, list):
+ self._filter_fields(self.fields[key], value)
+
+ # Create a list to store allowed fields.
+ allowed = []
+ for item in fields:
+ # If the item is a string, it directly represents a field's name.
+ if isinstance(item, str):
+ allowed.append(item)
+ # If the item is a dictionary, it represents a nested field.
+ # Add the key of this dictionary to the allowed list.
+ elif isinstance(item, dict):
+ allowed.append(list(item.keys())[0])
+
+ # Convert the current serializer's fields and the allowed fields to sets.
+ existing = set(self.fields)
+ allowed = set(allowed)
+
+ # Remove fields from the serializer that aren't in the 'allowed' list.
+ for field_name in (existing - allowed):
+ self.fields.pop(field_name)
+
+ return self.fields
diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py
index 66436803333..104a3dd067a 100644
--- a/apiserver/plane/api/serializers/cycle.py
+++ b/apiserver/plane/api/serializers/cycle.py
@@ -1,6 +1,3 @@
-# Django imports
-from django.db.models.functions import TruncDate
-
# Third party imports
from rest_framework import serializers
@@ -12,10 +9,14 @@
from .project import ProjectLiteSerializer
from plane.db.models import Cycle, CycleIssue, CycleFavorite
-class CycleWriteSerializer(BaseSerializer):
+class CycleWriteSerializer(BaseSerializer):
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
@@ -34,7 +35,6 @@ class CycleSerializer(BaseSerializer):
unstarted_issues = serializers.IntegerField(read_only=True)
backlog_issues = serializers.IntegerField(read_only=True)
assignees = serializers.SerializerMethodField(read_only=True)
- labels = serializers.SerializerMethodField(read_only=True)
total_estimates = serializers.IntegerField(read_only=True)
completed_estimates = serializers.IntegerField(read_only=True)
started_estimates = serializers.IntegerField(read_only=True)
@@ -42,19 +42,24 @@ class CycleSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
def validate(self, data):
- if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
raise serializers.ValidationError("Start date cannot exceed end date")
return data
-
+
def get_assignees(self, obj):
members = [
{
"avatar": assignee.avatar,
- "first_name": assignee.first_name,
"display_name": assignee.display_name,
"id": assignee.id,
}
- for issue_cycle in obj.issue_cycle.all()
+ for issue_cycle in obj.issue_cycle.prefetch_related(
+ "issue__assignees"
+ ).all()
for assignee in issue_cycle.issue.assignees.all()
]
# Use a set comprehension to return only the unique objects
@@ -64,24 +69,6 @@ def get_assignees(self, obj):
unique_list = [dict(item) for item in unique_objects]
return unique_list
-
- def get_labels(self, obj):
- labels = [
- {
- "name": label.name,
- "color": label.color,
- "id": label.id,
- }
- for issue_cycle in obj.issue_cycle.all()
- for label in issue_cycle.issue.labels.all()
- ]
- # Use a set comprehension to return only the unique objects
- unique_objects = {frozenset(item.items()) for item in labels}
-
- # Convert the set back to a list of dictionaries
- unique_list = [dict(item) for item in unique_objects]
-
- return unique_list
class Meta:
model = Cycle
diff --git a/apiserver/plane/api/serializers/inbox.py b/apiserver/plane/api/serializers/inbox.py
index ae17b749bfa..f52a90660be 100644
--- a/apiserver/plane/api/serializers/inbox.py
+++ b/apiserver/plane/api/serializers/inbox.py
@@ -6,7 +6,6 @@
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
-from .project import ProjectLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue
diff --git a/apiserver/plane/api/serializers/integration/__init__.py b/apiserver/plane/api/serializers/integration/__init__.py
index 963fc295e27..112ff02d162 100644
--- a/apiserver/plane/api/serializers/integration/__init__.py
+++ b/apiserver/plane/api/serializers/integration/__init__.py
@@ -5,4 +5,4 @@
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
-from .slack import SlackProjectSyncSerializer
\ No newline at end of file
+from .slack import SlackProjectSyncSerializer
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index 2a75b2f48fb..f061a0a1938 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -8,8 +8,7 @@
from .base import BaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
-from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
+from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
@@ -17,12 +16,10 @@
IssueActivity,
IssueComment,
IssueProperty,
- IssueBlocker,
IssueAssignee,
IssueSubscriber,
IssueLabel,
Label,
- IssueBlocker,
CycleIssue,
Cycle,
Module,
@@ -32,6 +29,7 @@
IssueReaction,
CommentReaction,
IssueVote,
+ IssueRelation,
)
@@ -50,6 +48,7 @@ class Meta:
"target_date",
"sequence_id",
"sort_order",
+ "is_draft",
]
@@ -75,31 +74,18 @@ class IssueCreateSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
- assignees_list = serializers.ListField(
+ assignees = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
)
- # List of issues that are blocking this issue
- blockers_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
- write_only=True,
- required=False,
- )
- labels_list = serializers.ListField(
+ labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
)
- # List of issues that are blocked by this issue
- blocks_list = serializers.ListField(
- child=serializers.PrimaryKeyRelatedField(queryset=Issue.objects.all()),
- write_only=True,
- required=False,
- )
-
class Meta:
model = Issue
fields = "__all__"
@@ -112,6 +98,12 @@ class Meta:
"updated_at",
]
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['assignees'] = [str(assignee.id) for assignee in instance.assignees.all()]
+ data['labels'] = [str(label.id) for label in instance.labels.all()]
+ return data
+
def validate(self, data):
if (
data.get("start_date", None) is not None
@@ -122,10 +114,8 @@ def validate(self, data):
return data
def create(self, validated_data):
- blockers = validated_data.pop("blockers_list", None)
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
- blocks = validated_data.pop("blocks_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
workspace_id = self.context["workspace_id"]
@@ -137,22 +127,6 @@ def create(self, validated_data):
created_by_id = issue.created_by_id
updated_by_id = issue.updated_by_id
- if blockers is not None and len(blockers):
- IssueBlocker.objects.bulk_create(
- [
- IssueBlocker(
- block=issue,
- blocked_by=blocker,
- project_id=project_id,
- workspace_id=workspace_id,
- created_by_id=created_by_id,
- updated_by_id=updated_by_id,
- )
- for blocker in blockers
- ],
- batch_size=10,
- )
-
if assignees is not None and len(assignees):
IssueAssignee.objects.bulk_create(
[
@@ -196,29 +170,11 @@ def create(self, validated_data):
batch_size=10,
)
- if blocks is not None and len(blocks):
- IssueBlocker.objects.bulk_create(
- [
- IssueBlocker(
- block=block,
- blocked_by=issue,
- project_id=project_id,
- workspace_id=workspace_id,
- created_by_id=created_by_id,
- updated_by_id=updated_by_id,
- )
- for block in blocks
- ],
- batch_size=10,
- )
-
return issue
def update(self, instance, validated_data):
- blockers = validated_data.pop("blockers_list", None)
- assignees = validated_data.pop("assignees_list", None)
- labels = validated_data.pop("labels_list", None)
- blocks = validated_data.pop("blocks_list", None)
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
# Related models
project_id = instance.project_id
@@ -226,23 +182,6 @@ def update(self, instance, validated_data):
created_by_id = instance.created_by_id
updated_by_id = instance.updated_by_id
- if blockers is not None:
- IssueBlocker.objects.filter(block=instance).delete()
- IssueBlocker.objects.bulk_create(
- [
- IssueBlocker(
- block=instance,
- blocked_by=blocker,
- project_id=project_id,
- workspace_id=workspace_id,
- created_by_id=created_by_id,
- updated_by_id=updated_by_id,
- )
- for blocker in blockers
- ],
- batch_size=10,
- )
-
if assignees is not None:
IssueAssignee.objects.filter(issue=instance).delete()
IssueAssignee.objects.bulk_create(
@@ -277,23 +216,6 @@ def update(self, instance, validated_data):
batch_size=10,
)
- if blocks is not None:
- IssueBlocker.objects.filter(blocked_by=instance).delete()
- IssueBlocker.objects.bulk_create(
- [
- IssueBlocker(
- block=block,
- blocked_by=instance,
- project_id=project_id,
- workspace_id=workspace_id,
- created_by_id=created_by_id,
- updated_by_id=updated_by_id,
- )
- for block in blocks
- ],
- batch_size=10,
- )
-
# Time updation occues even when other related models are updated
instance.updated_at = timezone.now()
return super().update(instance, validated_data)
@@ -309,25 +231,6 @@ class Meta:
fields = "__all__"
-class IssueCommentSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
- issue_detail = IssueFlatSerializer(read_only=True, source="issue")
- project_detail = ProjectLiteSerializer(read_only=True, source="project")
- workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
-
- class Meta:
- model = IssueComment
- fields = "__all__"
- read_only_fields = [
- "workspace",
- "project",
- "issue",
- "created_by",
- "updated_by",
- "created_at",
- "updated_at",
- ]
-
class IssuePropertySerializer(BaseSerializer):
class Meta:
@@ -364,7 +267,6 @@ class Meta:
class IssueLabelSerializer(BaseSerializer):
- # label_details = LabelSerializer(read_only=True, source="label")
class Meta:
model = IssueLabel
@@ -375,32 +277,39 @@ class Meta:
]
-class BlockedIssueSerializer(BaseSerializer):
- blocked_issue_detail = IssueProjectLiteSerializer(source="block", read_only=True)
+class IssueRelationSerializer(BaseSerializer):
+ issue_detail = IssueProjectLiteSerializer(read_only=True, source="related_issue")
class Meta:
- model = IssueBlocker
+ model = IssueRelation
fields = [
- "blocked_issue_detail",
- "blocked_by",
- "block",
+ "issue_detail",
+ "relation_type",
+ "related_issue",
+ "issue",
+ "id"
+ ]
+ read_only_fields = [
+ "workspace",
+ "project",
]
- read_only_fields = fields
-
-class BlockerIssueSerializer(BaseSerializer):
- blocker_issue_detail = IssueProjectLiteSerializer(
- source="blocked_by", read_only=True
- )
+class RelatedIssueSerializer(BaseSerializer):
+ issue_detail = IssueProjectLiteSerializer(read_only=True, source="issue")
class Meta:
- model = IssueBlocker
+ model = IssueRelation
fields = [
- "blocker_issue_detail",
- "blocked_by",
- "block",
+ "issue_detail",
+ "relation_type",
+ "related_issue",
+ "issue",
+ "id"
+ ]
+ read_only_fields = [
+ "workspace",
+ "project",
]
- read_only_fields = fields
class IssueAssigneeSerializer(BaseSerializer):
@@ -514,6 +423,9 @@ class Meta:
class IssueReactionSerializer(BaseSerializer):
+
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
class Meta:
model = IssueReaction
fields = "__all__"
@@ -525,19 +437,6 @@ class Meta:
]
-class IssueReactionLiteSerializer(BaseSerializer):
- actor_detail = UserLiteSerializer(read_only=True, source="actor")
-
- class Meta:
- model = IssueReaction
- fields = [
- "id",
- "reaction",
- "issue",
- "actor_detail",
- ]
-
-
class CommentReactionLiteSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
@@ -559,9 +458,12 @@ class Meta:
class IssueVoteSerializer(BaseSerializer):
+
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
class Meta:
model = IssueVote
- fields = ["issue", "vote", "workspace_id", "project_id", "actor"]
+ fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
read_only_fields = fields
@@ -624,16 +526,14 @@ class IssueSerializer(BaseSerializer):
parent_detail = IssueStateFlatSerializer(read_only=True, source="parent")
label_details = LabelSerializer(read_only=True, source="labels", many=True)
assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
- # List of issues blocked by this issue
- blocked_issues = BlockedIssueSerializer(read_only=True, many=True)
- # List of issues that block this issue
- blocker_issues = BlockerIssueSerializer(read_only=True, many=True)
+ related_issues = IssueRelationSerializer(read_only=True, source="issue_relation", many=True)
+ issue_relations = RelatedIssueSerializer(read_only=True, source="issue_related", many=True)
issue_cycle = IssueCycleDetailSerializer(read_only=True)
issue_module = IssueModuleDetailSerializer(read_only=True)
issue_link = IssueLinkSerializer(read_only=True, many=True)
issue_attachment = IssueAttachmentSerializer(read_only=True, many=True)
sub_issues_count = serializers.IntegerField(read_only=True)
- issue_reactions = IssueReactionLiteSerializer(read_only=True, many=True)
+ issue_reactions = IssueReactionSerializer(read_only=True, many=True)
class Meta:
model = Issue
@@ -659,7 +559,7 @@ class IssueLiteSerializer(BaseSerializer):
module_id = serializers.UUIDField(read_only=True)
attachment_count = serializers.IntegerField(read_only=True)
link_count = serializers.IntegerField(read_only=True)
- issue_reactions = IssueReactionLiteSerializer(read_only=True, many=True)
+ issue_reactions = IssueReactionSerializer(read_only=True, many=True)
class Meta:
model = Issue
@@ -680,7 +580,8 @@ class Meta:
class IssuePublicSerializer(BaseSerializer):
project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateLiteSerializer(read_only=True, source="state")
- issue_reactions = IssueReactionLiteSerializer(read_only=True, many=True)
+ reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
+ votes = IssueVoteSerializer(read_only=True, many=True)
class Meta:
model = Issue
@@ -696,11 +597,13 @@ class Meta:
"workspace",
"priority",
"target_date",
- "issue_reactions",
+ "reactions",
+ "votes",
]
read_only_fields = fields
+
class IssueSubscriberSerializer(BaseSerializer):
class Meta:
model = IssueSubscriber
diff --git a/apiserver/plane/api/serializers/module.py b/apiserver/plane/api/serializers/module.py
index aaabd4ae071..48f773b0f81 100644
--- a/apiserver/plane/api/serializers/module.py
+++ b/apiserver/plane/api/serializers/module.py
@@ -4,9 +4,8 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
-from .project import ProjectSerializer, ProjectLiteSerializer
+from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
-from .issue import IssueStateSerializer
from plane.db.models import (
User,
@@ -19,7 +18,7 @@
class ModuleWriteSerializer(BaseSerializer):
- members_list = serializers.ListField(
+ members = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
@@ -39,6 +38,11 @@ class Meta:
"created_at",
"updated_at",
]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['members'] = [str(member.id) for member in instance.members.all()]
+ return data
def validate(self, data):
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
@@ -46,7 +50,7 @@ def validate(self, data):
return data
def create(self, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
project = self.context["project"]
@@ -72,7 +76,7 @@ def create(self, validated_data):
return module
def update(self, instance, validated_data):
- members = validated_data.pop("members_list", None)
+ members = validated_data.pop("members", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()
diff --git a/apiserver/plane/api/serializers/page.py b/apiserver/plane/api/serializers/page.py
index 94f7836de18..abdf958cb10 100644
--- a/apiserver/plane/api/serializers/page.py
+++ b/apiserver/plane/api/serializers/page.py
@@ -33,7 +33,7 @@ class Meta:
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
- labels_list = serializers.ListField(
+ labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
@@ -50,9 +50,13 @@ class Meta:
"project",
"owned_by",
]
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data['labels'] = [str(label.id) for label in instance.labels.all()]
+ return data
def create(self, validated_data):
- labels = validated_data.pop("labels_list", None)
+ labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
page = Page.objects.create(
@@ -77,7 +81,7 @@ def create(self, validated_data):
return page
def update(self, instance, validated_data):
- labels = validated_data.pop("labels_list", None)
+ labels = validated_data.pop("labels", None)
if labels is not None:
PageLabel.objects.filter(page=instance).delete()
PageLabel.objects.bulk_create(
diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py
index 49d986cae0b..36fa6ecca7c 100644
--- a/apiserver/plane/api/serializers/project.py
+++ b/apiserver/plane/api/serializers/project.py
@@ -1,11 +1,8 @@
-# Django imports
-from django.db import IntegrityError
-
# Third party imports
from rest_framework import serializers
# Module imports
-from .base import BaseSerializer
+from .base import BaseSerializer, DynamicBaseSerializer
from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer
from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
@@ -94,8 +91,33 @@ class Meta:
read_only_fields = fields
+class ProjectListSerializer(DynamicBaseSerializer):
+ is_favorite = serializers.BooleanField(read_only=True)
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
+ is_member = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ is_deployed = serializers.BooleanField(read_only=True)
+ members = serializers.SerializerMethodField()
+
+ def get_members(self, obj):
+ project_members = ProjectMember.objects.filter(project_id=obj.id).values(
+ "id",
+ "member_id",
+ "member__display_name",
+ "member__avatar",
+ )
+ return project_members
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+
+
class ProjectDetailSerializer(BaseSerializer):
- workspace = WorkSpaceSerializer(read_only=True)
+ # workspace = WorkSpaceSerializer(read_only=True)
default_assignee = UserLiteSerializer(read_only=True)
project_lead = UserLiteSerializer(read_only=True)
is_favorite = serializers.BooleanField(read_only=True)
@@ -148,8 +170,6 @@ class Meta:
class ProjectFavoriteSerializer(BaseSerializer):
- project_detail = ProjectLiteSerializer(source="project", read_only=True)
-
class Meta:
model = ProjectFavorite
fields = "__all__"
@@ -178,12 +198,12 @@ class Meta:
fields = "__all__"
read_only_fields = [
"workspace",
- "project", "anchor",
+ "project",
+ "anchor",
]
class ProjectPublicMemberSerializer(BaseSerializer):
-
class Meta:
model = ProjectPublicMember
fields = "__all__"
diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py
index dcb00c6cbfe..b8f9dedd43d 100644
--- a/apiserver/plane/api/serializers/user.py
+++ b/apiserver/plane/api/serializers/user.py
@@ -3,7 +3,7 @@
# Module import
from .base import BaseSerializer
-from plane.db.models import User
+from plane.db.models import User, Workspace, WorkspaceMemberInvite
class UserSerializer(BaseSerializer):
@@ -33,6 +33,81 @@ def get_is_onboarded(self, obj):
return bool(obj.first_name) or bool(obj.last_name)
+class UserMeSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "avatar",
+ "cover_image",
+ "date_joined",
+ "display_name",
+ "email",
+ "first_name",
+ "last_name",
+ "is_active",
+ "is_bot",
+ "is_email_verified",
+ "is_managed",
+ "is_onboarded",
+ "is_tour_completed",
+ "mobile_number",
+ "role",
+ "onboarding_step",
+ "user_timezone",
+ "username",
+ "theme",
+ "last_workspace_id",
+ ]
+ read_only_fields = fields
+
+
+class UserMeSettingsSerializer(BaseSerializer):
+ workspace = serializers.SerializerMethodField()
+
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "email",
+ "workspace",
+ ]
+ read_only_fields = fields
+
+ def get_workspace(self, obj):
+ workspace_invites = WorkspaceMemberInvite.objects.filter(
+ email=obj.email
+ ).count()
+ if obj.last_workspace_id is not None:
+ workspace = Workspace.objects.filter(
+ pk=obj.last_workspace_id, workspace_member__member=obj.id
+ ).first()
+ return {
+ "last_workspace_id": obj.last_workspace_id,
+ "last_workspace_slug": workspace.slug if workspace is not None else "",
+ "fallback_workspace_id": obj.last_workspace_id,
+ "fallback_workspace_slug": workspace.slug if workspace is not None else "",
+ "invites": workspace_invites,
+ }
+ else:
+ fallback_workspace = (
+ Workspace.objects.filter(workspace_member__member_id=obj.id)
+ .order_by("created_at")
+ .first()
+ )
+ return {
+ "last_workspace_id": None,
+ "last_workspace_slug": None,
+ "fallback_workspace_id": fallback_workspace.id
+ if fallback_workspace is not None
+ else None,
+ "fallback_workspace_slug": fallback_workspace.slug
+ if fallback_workspace is not None
+ else None,
+ "invites": workspace_invites,
+ }
+
+
class UserLiteSerializer(BaseSerializer):
class Meta:
model = User
@@ -51,7 +126,6 @@ class Meta:
class UserAdminLiteSerializer(BaseSerializer):
-
class Meta:
model = User
fields = [
diff --git a/apiserver/plane/api/serializers/view.py b/apiserver/plane/api/serializers/view.py
index 076228ae098..e7502609a72 100644
--- a/apiserver/plane/api/serializers/view.py
+++ b/apiserver/plane/api/serializers/view.py
@@ -5,10 +5,39 @@
from .base import BaseSerializer
from .workspace import WorkspaceLiteSerializer
from .project import ProjectLiteSerializer
-from plane.db.models import IssueView, IssueViewFavorite
+from plane.db.models import GlobalView, IssueView, IssueViewFavorite
from plane.utils.issue_filters import issue_filters
+class GlobalViewSerializer(BaseSerializer):
+ workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+ class Meta:
+ model = GlobalView
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "query",
+ ]
+
+ def create(self, validated_data):
+ query_params = validated_data.get("query_data", {})
+ if bool(query_params):
+ validated_data["query"] = issue_filters(query_params, "POST")
+ else:
+ validated_data["query"] = dict()
+ return GlobalView.objects.create(**validated_data)
+
+ def update(self, instance, validated_data):
+ query_params = validated_data.get("query_data", {})
+ if bool(query_params):
+ validated_data["query"] = issue_filters(query_params, "POST")
+ else:
+ validated_data["query"] = dict()
+ validated_data["query"] = issue_filters(query_params, "PATCH")
+ return super().update(instance, validated_data)
+
+
class IssueViewSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
project_detail = ProjectLiteSerializer(source="project", read_only=True)
@@ -28,7 +57,7 @@ def create(self, validated_data):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
return IssueView.objects.create(**validated_data)
def update(self, instance, validated_data):
@@ -36,7 +65,7 @@ def update(self, instance, validated_data):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
- validated_data["query"] = dict()
+ validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)
diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py
index d27b66481c0..0a80ce8b7be 100644
--- a/apiserver/plane/api/serializers/workspace.py
+++ b/apiserver/plane/api/serializers/workspace.py
@@ -54,6 +54,13 @@ class Meta:
fields = "__all__"
+class WorkspaceMemberMeSerializer(BaseSerializer):
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
class WorkspaceMemberAdminSerializer(BaseSerializer):
member = UserAdminLiteSerializer(read_only=True)
workspace = WorkspaceLiteSerializer(read_only=True)
@@ -103,9 +110,8 @@ def create(self, validated_data, **kwargs):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return team
- else:
- team = Team.objects.create(**validated_data)
- return team
+ team = Team.objects.create(**validated_data)
+ return team
def update(self, instance, validated_data):
if "members" in validated_data:
@@ -117,8 +123,7 @@ def update(self, instance, validated_data):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return super().update(instance, validated_data)
- else:
- return super().update(instance, validated_data)
+ return super().update(instance, validated_data)
class WorkspaceThemeSerializer(BaseSerializer):
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
deleted file mode 100644
index 1fb2b8e905c..00000000000
--- a/apiserver/plane/api/urls.py
+++ /dev/null
@@ -1,1643 +0,0 @@
-from django.urls import path
-
-
-# Create your urls here.
-
-from plane.api.views import (
- # Authentication
- SignUpEndpoint,
- SignInEndpoint,
- SignOutEndpoint,
- MagicSignInEndpoint,
- MagicSignInGenerateEndpoint,
- OauthEndpoint,
- ## End Authentication
- # Auth Extended
- ForgotPasswordEndpoint,
- VerifyEmailEndpoint,
- ResetPasswordEndpoint,
- RequestEmailVerificationEndpoint,
- ChangePasswordEndpoint,
- ## End Auth Extender
- # User
- UserEndpoint,
- UpdateUserOnBoardedEndpoint,
- UpdateUserTourCompletedEndpoint,
- UserActivityEndpoint,
- ## End User
- # Workspaces
- WorkSpaceViewSet,
- UserWorkspaceInvitationsEndpoint,
- UserWorkSpacesEndpoint,
- InviteWorkspaceEndpoint,
- JoinWorkspaceEndpoint,
- WorkSpaceMemberViewSet,
- WorkspaceMembersEndpoint,
- WorkspaceInvitationsViewset,
- UserWorkspaceInvitationsEndpoint,
- WorkspaceMemberUserEndpoint,
- WorkspaceMemberUserViewsEndpoint,
- WorkSpaceAvailabilityCheckEndpoint,
- TeamMemberViewSet,
- AddTeamToProjectEndpoint,
- UserLastProjectWithWorkspaceEndpoint,
- UserWorkspaceInvitationEndpoint,
- UserActivityGraphEndpoint,
- UserIssueCompletedGraphEndpoint,
- UserWorkspaceDashboardEndpoint,
- WorkspaceThemeViewSet,
- WorkspaceUserProfileStatsEndpoint,
- WorkspaceUserActivityEndpoint,
- WorkspaceUserProfileEndpoint,
- WorkspaceUserProfileIssuesEndpoint,
- WorkspaceLabelsEndpoint,
- ## End Workspaces
- # File Assets
- FileAssetEndpoint,
- UserAssetsEndpoint,
- ## End File Assets
- # Projects
- ProjectViewSet,
- InviteProjectEndpoint,
- ProjectMemberViewSet,
- ProjectMemberEndpoint,
- ProjectMemberInvitationsViewset,
- ProjectMemberUserEndpoint,
- AddMemberToProjectEndpoint,
- ProjectJoinEndpoint,
- UserProjectInvitationsViewset,
- ProjectIdentifierEndpoint,
- ProjectFavoritesViewSet,
- ## End Projects
- # Issues
- IssueViewSet,
- WorkSpaceIssuesEndpoint,
- IssueActivityEndpoint,
- IssueCommentViewSet,
- UserWorkSpaceIssues,
- BulkDeleteIssuesEndpoint,
- BulkImportIssuesEndpoint,
- ProjectUserViewsEndpoint,
- IssuePropertyViewSet,
- LabelViewSet,
- SubIssuesEndpoint,
- IssueLinkViewSet,
- BulkCreateIssueLabelsEndpoint,
- IssueAttachmentEndpoint,
- IssueArchiveViewSet,
- IssueSubscriberViewSet,
- IssueCommentPublicViewSet,
- IssueReactionViewSet,
- CommentReactionViewSet,
- ## End Issues
- # States
- StateViewSet,
- ## End States
- # Estimates
- ProjectEstimatePointEndpoint,
- BulkEstimatePointEndpoint,
- ## End Estimates
- # Views
- IssueViewViewSet,
- ViewIssuesEndpoint,
- IssueViewFavoriteViewSet,
- ## End Views
- # Cycles
- CycleViewSet,
- CycleIssueViewSet,
- CycleDateCheckEndpoint,
- CycleFavoriteViewSet,
- TransferCycleIssueEndpoint,
- ## End Cycles
- # Modules
- ModuleViewSet,
- ModuleIssueViewSet,
- ModuleFavoriteViewSet,
- ModuleLinkViewSet,
- BulkImportModulesEndpoint,
- ## End Modules
- # Pages
- PageViewSet,
- PageBlockViewSet,
- PageFavoriteViewSet,
- CreateIssueFromPageBlockEndpoint,
- ## End Pages
- # Api Tokens
- ApiTokenEndpoint,
- ## End Api Tokens
- # Integrations
- IntegrationViewSet,
- WorkspaceIntegrationViewSet,
- GithubRepositoriesEndpoint,
- GithubRepositorySyncViewSet,
- GithubIssueSyncViewSet,
- GithubCommentSyncViewSet,
- BulkCreateGithubIssueSyncEndpoint,
- SlackProjectSyncViewSet,
- ## End Integrations
- # Importer
- ServiceIssueImportSummaryEndpoint,
- ImportServiceEndpoint,
- UpdateServiceImportStatusEndpoint,
- ## End importer
- # Search
- GlobalSearchEndpoint,
- IssueSearchEndpoint,
- ## End Search
- # Gpt
- GPTIntegrationEndpoint,
- ## End Gpt
- # Release Notes
- ReleaseNotesEndpoint,
- ## End Release Notes
- # Inbox
- InboxViewSet,
- InboxIssueViewSet,
- ## End Inbox
- # Analytics
- AnalyticsEndpoint,
- AnalyticViewViewset,
- SavedAnalyticEndpoint,
- ExportAnalyticsEndpoint,
- DefaultAnalyticsEndpoint,
- ## End Analytics
- # Notification
- NotificationViewSet,
- UnreadNotificationEndpoint,
- MarkAllReadNotificationViewSet,
- ## End Notification
- # Public Boards
- ProjectDeployBoardViewSet,
- ProjectIssuesPublicEndpoint,
- ProjectDeployBoardPublicSettingsEndpoint,
- IssueReactionPublicViewSet,
- CommentReactionPublicViewSet,
- InboxIssuePublicViewSet,
- IssueVotePublicViewSet,
- WorkspaceProjectDeployBoardEndpoint,
- IssueRetrievePublicEndpoint,
- ## End Public Boards
- ## Exporter
- ExportIssuesEndpoint,
- ## End Exporter
-
-)
-
-
-urlpatterns = [
- # Social Auth
- path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
- # Auth
- path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
- path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
- path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
- # Magic Sign In/Up
- path(
- "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
- ),
- path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
- # Email verification
- path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
- path(
- "request-email-verify/",
- RequestEmailVerificationEndpoint.as_view(),
- name="request-reset-email",
- ),
- # Password Manipulation
- path(
- "reset-password///",
- ResetPasswordEndpoint.as_view(),
- name="password-reset",
- ),
- path(
- "forgot-password/",
- ForgotPasswordEndpoint.as_view(),
- name="forgot-password",
- ),
- # User Profile
- path(
- "users/me/",
- UserEndpoint.as_view(
- {"get": "retrieve", "patch": "partial_update", "delete": "destroy"}
- ),
- name="users",
- ),
- path(
- "users/me/change-password/",
- ChangePasswordEndpoint.as_view(),
- name="change-password",
- ),
- path(
- "users/me/onboard/",
- UpdateUserOnBoardedEndpoint.as_view(),
- name="user-onboard",
- ),
- path(
- "users/me/tour-completed/",
- UpdateUserTourCompletedEndpoint.as_view(),
- name="user-tour",
- ),
- path("users/workspaces//activities/", UserActivityEndpoint.as_view(), name="user-activities"),
- # user workspaces
- path(
- "users/me/workspaces/",
- UserWorkSpacesEndpoint.as_view(),
- name="user-workspace",
- ),
- # user workspace invitations
- path(
- "users/me/invitations/workspaces/",
- UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}),
- name="user-workspace-invitations",
- ),
- # user workspace invitation
- path(
- "users/me/invitations//",
- UserWorkspaceInvitationEndpoint.as_view(
- {
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- # user join workspace
- # User Graphs
- path(
- "users/me/workspaces//activity-graph/",
- UserActivityGraphEndpoint.as_view(),
- name="user-activity-graph",
- ),
- path(
- "users/me/workspaces//issues-completed-graph/",
- UserIssueCompletedGraphEndpoint.as_view(),
- name="completed-graph",
- ),
- path(
- "users/me/workspaces//dashboard/",
- UserWorkspaceDashboardEndpoint.as_view(),
- name="user-workspace-dashboard",
- ),
- ## User Graph
- path(
- "users/me/invitations/workspaces///join/",
- JoinWorkspaceEndpoint.as_view(),
- name="user-join-workspace",
- ),
- # user project invitations
- path(
- "users/me/invitations/projects/",
- UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}),
- name="user-project-invitaions",
- ),
- ## Workspaces ##
- path(
- "workspace-slug-check/",
- WorkSpaceAvailabilityCheckEndpoint.as_view(),
- name="workspace-availability",
- ),
- path(
- "workspaces/",
- WorkSpaceViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//",
- WorkSpaceViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//invite/",
- InviteWorkspaceEndpoint.as_view(),
- name="workspace",
- ),
- path(
- "workspaces//invitations/",
- WorkspaceInvitationsViewset.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//invitations//",
- WorkspaceInvitationsViewset.as_view(
- {
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//members/",
- WorkSpaceMemberViewSet.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//members//",
- WorkSpaceMemberViewSet.as_view(
- {
- "patch": "partial_update",
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//workspace-members/",
- WorkspaceMembersEndpoint.as_view(),
- name="workspace-members",
- ),
- path(
- "workspaces//teams/",
- TeamMemberViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace",
- ),
- path(
- "workspaces//teams//",
- TeamMemberViewSet.as_view(
- {
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- "get": "retrieve",
- }
- ),
- name="workspace",
- ),
- path(
- "users/last-visited-workspace/",
- UserLastProjectWithWorkspaceEndpoint.as_view(),
- name="workspace-project-details",
- ),
- path(
- "workspaces//workspace-members/me/",
- WorkspaceMemberUserEndpoint.as_view(),
- name="workspace-member-details",
- ),
- path(
- "workspaces//workspace-views/",
- WorkspaceMemberUserViewsEndpoint.as_view(),
- name="workspace-member-details",
- ),
- path(
- "workspaces//workspace-themes/",
- WorkspaceThemeViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="workspace-themes",
- ),
- path(
- "workspaces//workspace-themes//",
- WorkspaceThemeViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="workspace-themes",
- ),
- path(
- "workspaces//user-stats//",
- WorkspaceUserProfileStatsEndpoint.as_view(),
- name="workspace-user-stats",
- ),
- path(
- "workspaces//user-activity//",
- WorkspaceUserActivityEndpoint.as_view(),
- name="workspace-user-activity",
- ),
- path(
- "workspaces//user-profile//",
- WorkspaceUserProfileEndpoint.as_view(),
- name="workspace-user-profile-page",
- ),
- path(
- "workspaces//user-issues//",
- WorkspaceUserProfileIssuesEndpoint.as_view(),
- name="workspace-user-profile-issues",
- ),
- path(
- "workspaces//labels/",
- WorkspaceLabelsEndpoint.as_view(),
- name="workspace-labels",
- ),
- ## End Workspaces ##
- # Projects
- path(
- "workspaces//projects/",
- ProjectViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//",
- ProjectViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//project-identifiers/",
- ProjectIdentifierEndpoint.as_view(),
- name="project-identifiers",
- ),
- path(
- "workspaces//projects//invite/",
- InviteProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//members/",
- ProjectMemberViewSet.as_view({"get": "list"}),
- name="project",
- ),
- path(
- "workspaces//projects//members//",
- ProjectMemberViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//project-members/",
- ProjectMemberEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//members/add/",
- AddMemberToProjectEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects/join/",
- ProjectJoinEndpoint.as_view(),
- name="project",
- ),
- path(
- "workspaces//projects//team-invite/",
- AddTeamToProjectEndpoint.as_view(),
- name="projects",
- ),
- path(
- "workspaces//projects//invitations/",
- ProjectMemberInvitationsViewset.as_view({"get": "list"}),
- name="workspace",
- ),
- path(
- "workspaces//projects//invitations//",
- ProjectMemberInvitationsViewset.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//projects//project-views/",
- ProjectUserViewsEndpoint.as_view(),
- name="project-view",
- ),
- path(
- "workspaces//projects//project-members/me/",
- ProjectMemberUserEndpoint.as_view(),
- name="project-view",
- ),
- path(
- "workspaces//user-favorite-projects/",
- ProjectFavoritesViewSet.as_view(
- {
- "post": "create",
- }
- ),
- name="project",
- ),
- path(
- "workspaces//user-favorite-projects//",
- ProjectFavoritesViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="project",
- ),
- # End Projects
- # States
- path(
- "workspaces//projects//states/",
- StateViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-states",
- ),
- path(
- "workspaces//projects//states//",
- StateViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-state",
- ),
- # End States ##
- # Estimates
- path(
- "workspaces//projects//project-estimates/",
- ProjectEstimatePointEndpoint.as_view(),
- name="project-estimate-points",
- ),
- path(
- "workspaces//projects//estimates/",
- BulkEstimatePointEndpoint.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="bulk-create-estimate-points",
- ),
- path(
- "workspaces//projects//estimates//",
- BulkEstimatePointEndpoint.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="bulk-create-estimate-points",
- ),
- # End Estimates ##
- # Views
- path(
- "workspaces//projects//views/",
- IssueViewViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-view",
- ),
- path(
- "workspaces//projects//views//",
- IssueViewViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-view",
- ),
- path(
- "workspaces//projects//views//issues/",
- ViewIssuesEndpoint.as_view(),
- name="project-view-issues",
- ),
- path(
- "workspaces//projects//user-favorite-views/",
- IssueViewFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-view",
- ),
- path(
- "workspaces//projects//user-favorite-views//",
- IssueViewFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-view",
- ),
- ## End Views
- ## Cycles
- path(
- "workspaces//projects//cycles/",
- CycleViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//",
- CycleViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//cycle-issues/",
- CycleIssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles//cycle-issues//",
- CycleIssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//cycles/date-check/",
- CycleDateCheckEndpoint.as_view(),
- name="project-cycle",
- ),
- path(
- "workspaces//projects//user-favorite-cycles/",
- CycleFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-cycle",
- ),
- path(
- "workspaces//projects//user-favorite-cycles//",
- CycleFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-cycle",
- ),
- path(
- "workspaces//projects//cycles//transfer-issues/",
- TransferCycleIssueEndpoint.as_view(),
- name="transfer-issues",
- ),
- ## End Cycles
- # Issue
- path(
- "workspaces//projects//issues/",
- IssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue",
- ),
- path(
- "workspaces//projects//issues//",
- IssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue",
- ),
- path(
- "workspaces//issues/",
- WorkSpaceIssuesEndpoint.as_view(),
- name="workspace-issue",
- ),
- path(
- "workspaces//projects//issue-labels/",
- LabelViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-labels",
- ),
- path(
- "workspaces//projects//issue-labels//",
- LabelViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-labels",
- ),
- path(
- "workspaces//projects//bulk-create-labels/",
- BulkCreateIssueLabelsEndpoint.as_view(),
- name="project-bulk-labels",
- ),
- path(
- "workspaces//projects//bulk-delete-issues/",
- BulkDeleteIssuesEndpoint.as_view(),
- name="project-issues-bulk",
- ),
- path(
- "workspaces//projects//bulk-import-issues//",
- BulkImportIssuesEndpoint.as_view(),
- name="project-issues-bulk",
- ),
- path(
- "workspaces//my-issues/",
- UserWorkSpaceIssues.as_view(),
- name="workspace-issues",
- ),
- path(
- "workspaces//projects//issues//sub-issues/",
- SubIssuesEndpoint.as_view(),
- name="sub-issues",
- ),
- path(
- "workspaces//projects//issues//issue-links/",
- IssueLinkViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-links",
- ),
- path(
- "workspaces//projects//issues//issue-links//",
- IssueLinkViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-links",
- ),
- path(
- "workspaces//projects//issues//issue-attachments/",
- IssueAttachmentEndpoint.as_view(),
- name="project-issue-attachments",
- ),
- path(
- "workspaces//projects//issues//issue-attachments//",
- IssueAttachmentEndpoint.as_view(),
- name="project-issue-attachments",
- ),
- path(
- "workspaces//export-issues/",
- ExportIssuesEndpoint.as_view(),
- name="export-issues",
- ),
- ## End Issues
- ## Issue Activity
- path(
- "workspaces//projects//issues//history/",
- IssueActivityEndpoint.as_view(),
- name="project-issue-history",
- ),
- ## Issue Activity
- ## IssueComments
- path(
- "workspaces//projects//issues//comments/",
- IssueCommentViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-comment",
- ),
- path(
- "workspaces//projects//issues//comments//",
- IssueCommentViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-comment",
- ),
- ## End IssueComments
- # Issue Subscribers
- path(
- "workspaces//projects//issues//issue-subscribers/",
- IssueSubscriberViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-subscribers",
- ),
- path(
- "workspaces//projects//issues//issue-subscribers//",
- IssueSubscriberViewSet.as_view({"delete": "destroy"}),
- name="project-issue-subscribers",
- ),
- path(
- "workspaces//projects//issues//subscribe/",
- IssueSubscriberViewSet.as_view(
- {
- "get": "subscription_status",
- "post": "subscribe",
- "delete": "unsubscribe",
- }
- ),
- name="project-issue-subscribers",
- ),
- ## End Issue Subscribers
- # Issue Reactions
- path(
- "workspaces//projects//issues//reactions/",
- IssueReactionViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-reactions",
- ),
- path(
- "workspaces//projects//issues//reactions//",
- IssueReactionViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="project-issue-reactions",
- ),
- ## End Issue Reactions
- # Comment Reactions
- path(
- "workspaces//projects//comments//reactions/",
- CommentReactionViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-comment-reactions",
- ),
- path(
- "workspaces//projects//comments//reactions//",
- CommentReactionViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="project-issue-comment-reactions",
- ),
- ## End Comment Reactions
- ## IssueProperty
- path(
- "workspaces//projects//issue-properties/",
- IssuePropertyViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-roadmap",
- ),
- path(
- "workspaces//projects//issue-properties//",
- IssuePropertyViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-roadmap",
- ),
- ## IssueProperty Ebd
- ## Issue Archives
- path(
- "workspaces//projects//archived-issues/",
- IssueArchiveViewSet.as_view(
- {
- "get": "list",
- }
- ),
- name="project-issue-archive",
- ),
- path(
- "workspaces//projects//archived-issues//",
- IssueArchiveViewSet.as_view(
- {
- "get": "retrieve",
- "delete": "destroy",
- }
- ),
- name="project-issue-archive",
- ),
- path(
- "workspaces//projects//unarchive//",
- IssueArchiveViewSet.as_view(
- {
- "post": "unarchive",
- }
- ),
- name="project-issue-archive",
- ),
- ## End Issue Archives
- ## File Assets
- path(
- "workspaces//file-assets/",
- FileAssetEndpoint.as_view(),
- name="file-assets",
- ),
- path(
- "workspaces/file-assets///",
- FileAssetEndpoint.as_view(),
- name="file-assets",
- ),
- path(
- "users/file-assets/",
- UserAssetsEndpoint.as_view(),
- name="user-file-assets",
- ),
- path(
- "users/file-assets//",
- UserAssetsEndpoint.as_view(),
- name="user-file-assets",
- ),
- ## End File Assets
- ## Modules
- path(
- "workspaces//projects//modules/",
- ModuleViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-modules",
- ),
- path(
- "workspaces//projects//modules//",
- ModuleViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-modules",
- ),
- path(
- "workspaces//projects//modules//module-issues/",
- ModuleIssueViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-module-issues",
- ),
- path(
- "workspaces//projects//modules//module-issues//",
- ModuleIssueViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-module-issues",
- ),
- path(
- "workspaces//projects//modules//module-links/",
- ModuleLinkViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-issue-module-links",
- ),
- path(
- "workspaces//projects//modules//module-links//",
- ModuleLinkViewSet.as_view(
- {
- "get": "retrieve",
- "put": "update",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-issue-module-links",
- ),
- path(
- "workspaces//projects//user-favorite-modules/",
- ModuleFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-module",
- ),
- path(
- "workspaces//projects//user-favorite-modules//",
- ModuleFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-module",
- ),
- path(
- "workspaces//projects//bulk-import-modules//",
- BulkImportModulesEndpoint.as_view(),
- name="bulk-modules-create",
- ),
- ## End Modules
- # Pages
- path(
- "workspaces//projects//pages/",
- PageViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-pages",
- ),
- path(
- "workspaces//projects//pages//",
- PageViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-pages",
- ),
- path(
- "workspaces//projects//pages//page-blocks/",
- PageBlockViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="project-page-blocks",
- ),
- path(
- "workspaces//projects//pages//page-blocks//",
- PageBlockViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="project-page-blocks",
- ),
- path(
- "workspaces//projects//user-favorite-pages/",
- PageFavoriteViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="user-favorite-pages",
- ),
- path(
- "workspaces//projects//user-favorite-pages//",
- PageFavoriteViewSet.as_view(
- {
- "delete": "destroy",
- }
- ),
- name="user-favorite-pages",
- ),
- path(
- "workspaces//projects//pages//page-blocks//issues/",
- CreateIssueFromPageBlockEndpoint.as_view(),
- name="page-block-issues",
- ),
- ## End Pages
- # API Tokens
- path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
- path("api-tokens//", ApiTokenEndpoint.as_view(), name="api-tokens"),
- ## End API Tokens
- # Integrations
- path(
- "integrations/",
- IntegrationViewSet.as_view(
- {
- "get": "list",
- "post": "create",
- }
- ),
- name="integrations",
- ),
- path(
- "integrations//",
- IntegrationViewSet.as_view(
- {
- "get": "retrieve",
- "patch": "partial_update",
- "delete": "destroy",
- }
- ),
- name="integrations",
- ),
- path(
- "workspaces//workspace-integrations/",
- WorkspaceIntegrationViewSet.as_view(
- {
- "get": "list",
- }
- ),
- name="workspace-integrations",
- ),
- path(
- "workspaces//workspace-integrations/