diff --git a/.deepsource.toml b/.deepsource.toml deleted file mode 100644 index 2b40af672d7..00000000000 --- a/.deepsource.toml +++ /dev/null @@ -1,23 +0,0 @@ -version = 1 - -exclude_patterns = [ - "bin/**", - "**/node_modules/", - "**/*.min.js" -] - -[[analyzers]] -name = "shell" - -[[analyzers]] -name = "javascript" - - [analyzers.meta] - plugins = ["react"] - environment = ["nodejs"] - -[[analyzers]] -name = "python" - - [analyzers.meta] - runtime_version = "3.x.x" \ No newline at end of file diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml index 60ebe583418..ed381453219 100644 --- a/.github/workflows/auto-merge.yml +++ b/.github/workflows/auto-merge.yml @@ -8,13 +8,13 @@ on: env: CURRENT_BRANCH: ${{ github.ref_name }} - SOURCE_BRANCH: ${{ secrets.SYNC_TARGET_BRANCH_NAME }} # The sync branch such as "sync/ce" - TARGET_BRANCH: ${{ secrets.TARGET_BRANCH }} # The target branch that you would like to merge changes like develop + SOURCE_BRANCH: ${{ secrets.SYNC_SOURCE_BRANCH_NAME }} # The sync branch such as "sync/ce" + TARGET_BRANCH: ${{ secrets.SYNC_TARGET_BRANCH_NAME }} # The target branch that you would like to merge changes like develop GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows - REVIEWER: ${{ secrets.REVIEWER }} + REVIEWER: ${{ secrets.SYNC_PR_REVIEWER }} jobs: - Check_Branch: + Check_Branch: runs-on: ubuntu-latest outputs: BRANCH_MATCH: ${{ steps.check-branch.outputs.MATCH }} @@ -27,7 +27,7 @@ jobs: else echo "MATCH=false" >> $GITHUB_OUTPUT fi - + Auto_Merge: if: ${{ needs.Check_Branch.outputs.BRANCH_MATCH == 'true' }} needs: [Check_Branch] @@ -41,6 +41,11 @@ jobs: with: fetch-depth: 0 # Fetch all history for all branches and tags + - name: Setup Git + run: | + git config user.name "GitHub Actions" + git config user.email "actions@github.com" + - name: Setup GH CLI and Git Config run: | type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y) @@ 
-50,20 +55,6 @@ jobs: sudo apt update sudo apt install gh -y - - id: git-author - name: Setup Git CLI from Github Token - run: | - VIEWER_JSON=$(gh api graphql -f query='query { viewer { name login databaseId }}' --jq '.data.viewer') - VIEWER_NAME=$(jq --raw-output '.name | values' <<< "${VIEWER_JSON}") - VIEWER_LOGIN=$(jq --raw-output '.login' <<< "${VIEWER_JSON}") - VIEWER_DATABASE_ID=$(jq --raw-output '.databaseId' <<< "${VIEWER_JSON}") - - USER_NAME="${VIEWER_NAME:-${VIEWER_LOGIN}}" - USER_EMAIL="${VIEWER_DATABASE_ID}+${VIEWER_LOGIN}@users.noreply.github.com" - - git config --global user.name ${USER_NAME} - git config --global user.email ${USER_EMAIL} - - name: Check for merge conflicts id: conflicts run: | @@ -88,10 +79,6 @@ jobs: - name: Create PR to Target Branch if: env.HAS_CONFLICTS == 'true' run: | - # Use GitHub CLI to create PR and specify author and committer - PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH \ - --title "sync: merge conflicts need to be resolved" \ - --body "" \ - --reviewer $REVIEWER ) + # Replace 'username' with the actual GitHub username of the reviewer. 
+ PR_URL=$(gh pr create --base $TARGET_BRANCH --head $SOURCE_BRANCH --title "sync: merge conflicts need to be resolved" --body "" --reviewer $REVIEWER) echo "Pull Request created: $PR_URL" - diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml index 74d81a83eb7..2977f4cda56 100644 --- a/.github/workflows/build-branch.yml +++ b/.github/workflows/build-branch.yml @@ -6,7 +6,6 @@ on: branches: - master - preview - - develop release: types: [released, prereleased] @@ -18,7 +17,7 @@ jobs: name: Build-Push Web/Space/API/Proxy Docker Image runs-on: ubuntu-latest outputs: - gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }} + gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }} gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }} gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }} gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }} @@ -74,7 +73,7 @@ jobs: - nginx/** branch_build_push_frontend: - if: ${{ (needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master') && !contains(needs.branch_build_setup.outputs.gh_buildx_platforms, 'linux/arm64') }} + if: ${{ (needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master') && !contains(needs.branch_build_setup.outputs.gh_buildx_platforms, 'linux/arm64') }} runs-on: ubuntu-20.04 needs: [branch_build_setup] env: @@ -298,7 +297,7 @@ jobs: ${{ env.FRONTEND_TAG_ARM64 }} branch_build_push_space: - if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }} + if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || 
github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }} runs-on: ubuntu-20.04 needs: [branch_build_setup] env: @@ -351,7 +350,7 @@ jobs: DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} branch_build_push_backend: - if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }} + if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }} runs-on: ubuntu-20.04 needs: [branch_build_setup] env: @@ -404,7 +403,7 @@ jobs: DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} branch_build_push_proxy: - if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }} + if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }} runs-on: ubuntu-20.04 needs: [branch_build_setup] env: @@ -455,4 +454,3 @@ jobs: DOCKER_BUILDKIT: 1 DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }} DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 9f6ab1bfb5c..d7b94d2456d 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -1,13 +1,13 @@ name: "CodeQL" on: + workflow_dispatch: push: - branches: [ 'develop', 'preview', 'master' ] + branches: ["develop", "preview", "master"] pull_request: - # The branches below must be a subset of the branches above - branches: [ 'develop', 'preview', 'master' ] + branches: ["develop", "preview", "master"] schedule: - - cron: '53 19 * * 5' + - cron: "53 19 * * 5" jobs: analyze: @@ -21,45 +21,44 @@ jobs: strategy: fail-fast: false matrix: 
- language: [ 'python', 'javascript' ] + language: ["python", "javascript"] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] # Use only 'java' to analyze code written in Java, Kotlin or both # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support steps: - - name: Checkout repository - uses: actions/checkout@v3 + - name: Checkout repository + uses: actions/checkout@v3 - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v2 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. - # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs - # queries: security-extended,security-and-quality + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). 
+ # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 - # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). - # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v2 + # ℹī¸ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun - # ℹī¸ Command-line programs to run using the OS shell. - # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. - # If the Autobuild fails above, remove it and uncomment the following three lines. - # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
+ # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh - # - run: | - # echo "Run, Build Application using script" - # ./location_of_script_within_repo/buildscript.sh - - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 - with: - category: "/language:${{matrix.language}}" + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/feature-deployment.yml b/.github/workflows/feature-deployment.yml index 7b9f5ffcc4b..c5eec3cd3ad 100644 --- a/.github/workflows/feature-deployment.yml +++ b/.github/workflows/feature-deployment.yml @@ -4,70 +4,196 @@ on: workflow_dispatch: inputs: web-build: - required: true + required: false + description: 'Build Web' type: boolean default: true space-build: - required: true + required: false + description: 'Build Space' type: boolean default: false +env: + BUILD_WEB: ${{ github.event.inputs.web-build }} + BUILD_SPACE: ${{ github.event.inputs.space-build }} + jobs: + setup-feature-build: + name: Feature Build Setup + runs-on: ubuntu-latest + steps: + - name: Checkout + run: | + echo "BUILD_WEB=$BUILD_WEB" + echo "BUILD_SPACE=$BUILD_SPACE" + outputs: + web-build: ${{ env.BUILD_WEB}} + space-build: ${{env.BUILD_SPACE}} + + feature-build-web: + if: ${{ needs.setup-feature-build.outputs.web-build == 'true' }} + needs: setup-feature-build + name: Feature Build Web + runs-on: ubuntu-latest + env: + AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }} + AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }} + NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }} + steps: + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: '18' + - name: Install AWS cli + run: | + sudo apt-get update + sudo apt-get install -y python3-pip + pip3 install 
awscli + - name: Checkout + uses: actions/checkout@v4 + with: + path: plane + - name: Install Dependencies + run: | + cd $GITHUB_WORKSPACE/plane + yarn install + - name: Build Web + id: build-web + run: | + cd $GITHUB_WORKSPACE/plane + yarn build --filter=web + cd $GITHUB_WORKSPACE + + TAR_NAME="web.tar.gz" + tar -czf $TAR_NAME ./plane + + FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ") + aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY + + feature-build-space: + if: ${{ needs.setup-feature-build.outputs.space-build == 'true' }} + needs: setup-feature-build + name: Feature Build Space + runs-on: ubuntu-latest + env: + AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }} + AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }} + NEXT_PUBLIC_DEPLOY_WITH_NGINX: 1 + NEXT_PUBLIC_API_BASE_URL: ${{ vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL }} + outputs: + do-build: ${{ needs.setup-feature-build.outputs.space-build }} + s3-url: ${{ steps.build-space.outputs.S3_PRESIGNED_URL }} + steps: + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: '18' + - name: Install AWS cli + run: | + sudo apt-get update + sudo apt-get install -y python3-pip + pip3 install awscli + - name: Checkout + uses: actions/checkout@v4 + with: + path: plane + - name: Install Dependencies + run: | + cd $GITHUB_WORKSPACE/plane + yarn install + - name: Build Space + id: build-space + run: | + cd $GITHUB_WORKSPACE/plane + yarn build --filter=space + cd $GITHUB_WORKSPACE + + TAR_NAME="space.tar.gz" + tar -czf $TAR_NAME ./plane + + FILE_EXPIRY=$(date -u -d "+2 days" +"%Y-%m-%dT%H:%M:%SZ") + aws s3 cp $TAR_NAME s3://${{ env.AWS_BUCKET }}/${{github.sha}}/$TAR_NAME --expires $FILE_EXPIRY + feature-deploy: + if: ${{ always() && (needs.setup-feature-build.outputs.web-build == 'true' || needs.setup-feature-build.outputs.space-build == 'true') }} + 
needs: [feature-build-web, feature-build-space] name: Feature Deploy runs-on: ubuntu-latest env: - KUBE_CONFIG_FILE: ${{ secrets.KUBE_CONFIG }} - BUILD_WEB: ${{ (github.event.inputs.web-build == '' && true) || github.event.inputs.web-build }} - BUILD_SPACE: ${{ (github.event.inputs.space-build == '' && false) || github.event.inputs.space-build }} - + AWS_ACCESS_KEY_ID: ${{ vars.FEATURE_PREVIEW_AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.FEATURE_PREVIEW_AWS_SECRET_ACCESS_KEY }} + AWS_BUCKET: ${{ vars.FEATURE_PREVIEW_AWS_BUCKET }} + KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }} steps: + - name: Install AWS cli + run: | + sudo apt-get update + sudo apt-get install -y python3-pip + pip3 install awscli - name: Tailscale uses: tailscale/github-action@v2 with: oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }} oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }} tags: tag:ci - - name: Kubectl Setup run: | - curl -LO "https://dl.k8s.io/release/${{secrets.KUBE_VERSION}}/bin/linux/amd64/kubectl" + curl -LO "https://dl.k8s.io/release/${{ vars.FEATURE_PREVIEW_KUBE_VERSION }}/bin/linux/amd64/kubectl" chmod +x kubectl mkdir -p ~/.kube echo "$KUBE_CONFIG_FILE" > ~/.kube/config chmod 600 ~/.kube/config - - name: HELM Setup run: | curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 chmod 700 get_helm.sh ./get_helm.sh - - name: App Deploy run: | - helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ secrets.FEATURE_PREVIEW_HELM_CHART_URL }} - GIT_BRANCH=${{ github.ref_name }} - APP_NAMESPACE=${{ secrets.FEATURE_PREVIEW_NAMESPACE }} - - METADATA=$(helm install feature-preview/${{ secrets.FEATURE_PREVIEW_HELM_CHART_NAME }} \ - --kube-insecure-skip-tls-verify \ - --generate-name \ - --namespace $APP_NAMESPACE \ - --set shared_config.git_repo=${{github.server_url}}/${{ github.repository }}.git \ - --set shared_config.git_branch="$GIT_BRANCH" \ - --set web.enabled=${{ env.BUILD_WEB }} \ - --set 
space.enabled=${{ env.BUILD_SPACE }} \ - --output json \ - --timeout 1000s) - - APP_NAME=$(echo $METADATA | jq -r '.name') - - INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \ - -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \ - jq -r '.spec.rules[0].host') - - echo "****************************************" - echo "APP NAME ::: $APP_NAME" - echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME" - echo "****************************************" + WEB_S3_URL="" + if [ ${{ env.BUILD_WEB }} == true ]; then + WEB_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/web.tar.gz --expires-in 3600) + fi + + SPACE_S3_URL="" + if [ ${{ env.BUILD_SPACE }} == true ]; then + SPACE_S3_URL=$(aws s3 presign s3://${{ vars.FEATURE_PREVIEW_AWS_BUCKET }}/${{github.sha}}/space.tar.gz --expires-in 3600) + fi + + if [ ${{ env.BUILD_WEB }} == true ] || [ ${{ env.BUILD_SPACE }} == true ]; then + + helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }} + + APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}" + DEPLOY_SCRIPT_URL="${{ vars.FEATURE_PREVIEW_DEPLOY_SCRIPT_URL }}" + + METADATA=$(helm --kube-insecure-skip-tls-verify install feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} \ + --generate-name \ + --namespace $APP_NAMESPACE \ + --set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \ + --set web.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \ + --set web.enabled=${{ env.BUILD_WEB || false }} \ + --set web.artifact_url=$WEB_S3_URL \ + --set space.image=${{vars.FEATURE_PREVIEW_DOCKER_BASE}} \ + --set space.enabled=${{ env.BUILD_SPACE || false }} \ + --set space.artifact_url=$SPACE_S3_URL \ + --set shared_config.deploy_script_url=$DEPLOY_SCRIPT_URL \ + --set shared_config.api_base_url=${{vars.FEATURE_PREVIEW_NEXT_PUBLIC_API_BASE_URL}} \ + --output json \ + --timeout 1000s) + + 
APP_NAME=$(echo $METADATA | jq -r '.name') + + INGRESS_HOSTNAME=$(kubectl get ingress -n feature-builds --insecure-skip-tls-verify \ + -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \ + jq -r '.spec.rules[0].host') + + echo "****************************************" + echo "APP NAME ::: $APP_NAME" + echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME" + echo "****************************************" + fi diff --git a/.gitignore b/.gitignore index 0b655bd0e75..3989f43561e 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,7 @@ staticfiles mediafiles .env .DS_Store +logs/ node_modules/ assets/dist/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 148568d76fb..f40c1a244bb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -50,7 +50,6 @@ chmod +x setup.sh docker compose -f docker-compose-local.yml up ``` - ## Missing a Feature? If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository. diff --git a/ENV_SETUP.md b/ENV_SETUP.md index bfc30019624..df05683efd9 100644 --- a/ENV_SETUP.md +++ b/ENV_SETUP.md @@ -53,7 +53,6 @@ NGINX_PORT=80 NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces" ``` - ## {PROJECT_FOLDER}/apiserver/.env ​ diff --git a/README.md b/README.md index 0a9bc0a8a37..a9b3cede1ec 100644 --- a/README.md +++ b/README.md @@ -26,10 +26,10 @@ To selfhost please follow - Website â€ĸ - Releases â€ĸ - Twitter â€ĸ - Documentation + Website â€ĸ + Releases â€ĸ + Twitter â€ĸ + Documentation

@@ -49,30 +49,28 @@ To selfhost please follow - \ No newline at end of file + diff --git a/apiserver/.env.example b/apiserver/.env.example index 6c39183639f..f35cf213a27 100644 --- a/apiserver/.env.example +++ b/apiserver/.env.example @@ -14,10 +14,6 @@ POSTGRES_HOST="plane-db" POSTGRES_DB="plane" DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}/${POSTGRES_DB} -# Oauth variables -GOOGLE_CLIENT_ID="" -GITHUB_CLIENT_ID="" -GITHUB_CLIENT_SECRET="" # Redis Settings REDIS_HOST="plane-redis" @@ -34,11 +30,6 @@ AWS_S3_BUCKET_NAME="uploads" # Maximum file upload limit FILE_SIZE_LIMIT=5242880 -# GPT settings -OPENAI_API_BASE="https://api.openai.com/v1" # deprecated -OPENAI_API_KEY="sk-" # deprecated -GPT_ENGINE="gpt-3.5-turbo" # deprecated - # Settings related to Docker DOCKERIZED=1 # deprecated @@ -74,4 +65,3 @@ WEB_URL="http://localhost" # Gunicorn Workers GUNICORN_WORKERS=2 - diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api index 0e4e0ac501b..34a50334ad4 100644 --- a/apiserver/Dockerfile.api +++ b/apiserver/Dockerfile.api @@ -48,8 +48,10 @@ USER root RUN apk --no-cache add "bash~=5.2" COPY ./bin ./bin/ +RUN mkdir -p /code/plane/logs RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat RUN chmod -R 777 /code +RUN chown -R captain:plane /code USER captain diff --git a/apiserver/Dockerfile.dev b/apiserver/Dockerfile.dev index bd6684fd5f4..06f15231c6e 100644 --- a/apiserver/Dockerfile.dev +++ b/apiserver/Dockerfile.dev @@ -35,6 +35,7 @@ RUN addgroup -S plane && \ COPY . . 
+RUN mkdir -p /code/plane/logs RUN chown -R captain.plane /code RUN chmod -R +x /code/bin RUN chmod -R 777 /code diff --git a/apiserver/back_migration.py b/apiserver/back_migration.py index a0e45416a45..328b9db2be3 100644 --- a/apiserver/back_migration.py +++ b/apiserver/back_migration.py @@ -182,7 +182,7 @@ def update_label_color(): labels = Label.objects.filter(color="") updated_labels = [] for label in labels: - label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF) + label.color = f"#{random.randint(0, 0xFFFFFF+1):06X}" updated_labels.append(label) Label.objects.bulk_update(updated_labels, ["color"], batch_size=100) diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff index efea53f8749..5a1da1570a1 100755 --- a/apiserver/bin/takeoff +++ b/apiserver/bin/takeoff @@ -21,11 +21,15 @@ SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256 export MACHINE_SIGNATURE=$SIGNATURE # Register instance -python manage.py register_instance $MACHINE_SIGNATURE +python manage.py register_instance "$MACHINE_SIGNATURE" + # Load the configuration variable python manage.py configure_instance # Create the default bucket python manage.py create_bucket -exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:${PORT:-8000} --max-requests 1200 --max-requests-jitter 1000 --access-logfile - +# Clear Cache before starting to remove stale values +python manage.py clear_cache + +exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile - diff --git a/apiserver/bin/takeoff.local b/apiserver/bin/takeoff.local index 8f62370ecf4..3194009b2c4 100755 --- a/apiserver/bin/takeoff.local +++ b/apiserver/bin/takeoff.local @@ -21,12 +21,15 @@ SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256 export MACHINE_SIGNATURE=$SIGNATURE # Register instance -python manage.py 
register_instance $MACHINE_SIGNATURE +python manage.py register_instance "$MACHINE_SIGNATURE" # Load the configuration variable python manage.py configure_instance # Create the default bucket python manage.py create_bucket +# Clear Cache before starting to remove stale values +python manage.py clear_cache + python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local diff --git a/apiserver/package.json b/apiserver/package.json index 060944406ca..2474aa2f299 100644 --- a/apiserver/package.json +++ b/apiserver/package.json @@ -1,4 +1,4 @@ { "name": "plane-api", - "version": "0.16.0" + "version": "0.17.0" } diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py index 4c8d6e815b1..c78b109efdf 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -1,31 +1,33 @@ -from lxml import html - +from django.core.exceptions import ValidationError +from django.core.validators import URLValidator # Django imports from django.utils import timezone +from lxml import html # Third party imports from rest_framework import serializers # Module imports from plane.db.models import ( - User, Issue, - State, + IssueActivity, IssueAssignee, - Label, + IssueAttachment, + IssueComment, IssueLabel, IssueLink, - IssueComment, - IssueAttachment, - IssueActivity, + Label, ProjectMember, + State, + User, ) + from .base import BaseSerializer -from .cycle import CycleSerializer, CycleLiteSerializer -from .module import ModuleSerializer, ModuleLiteSerializer -from .user import UserLiteSerializer +from .cycle import CycleLiteSerializer, CycleSerializer +from .module import ModuleLiteSerializer, ModuleSerializer from .state import StateLiteSerializer +from .user import UserLiteSerializer class IssueSerializer(BaseSerializer): @@ -78,7 +80,7 @@ def validate(self, data): data["description_html"] = parsed_str except Exception as e: - raise serializers.ValidationError(f"Invalid HTML: {str(e)}") + raise 
serializers.ValidationError("Invalid HTML passed") # Validate assignees are from project if data.get("assignees", []): @@ -284,6 +286,20 @@ class Meta: "updated_at", ] + def validate_url(self, value): + # Check URL format + validate_url = URLValidator() + try: + validate_url(value) + except ValidationError: + raise serializers.ValidationError("Invalid URL format.") + + # Check URL scheme + if not value.startswith(("http://", "https://")): + raise serializers.ValidationError("Invalid URL scheme.") + + return value + # Validation if url already exists def create(self, validated_data): if IssueLink.objects.filter( @@ -295,6 +311,17 @@ def create(self, validated_data): ) return IssueLink.objects.create(**validated_data) + def update(self, instance, validated_data): + if IssueLink.objects.filter( + url=validated_data.get("url"), + issue_id=instance.issue_id, + ).exists(): + raise serializers.ValidationError( + {"error": "URL already exists for this Issue"} + ) + + return super().update(instance, validated_data) + class IssueAttachmentSerializer(BaseSerializer): class Meta: @@ -340,7 +367,7 @@ def validate(self, data): data["comment_html"] = parsed_str except Exception as e: - raise serializers.ValidationError(f"Invalid HTML: {str(e)}") + raise serializers.ValidationError("Invalid HTML passed") return data diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 342cc1a81da..9dd4c9b85f3 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -6,8 +6,6 @@ Project, ProjectIdentifier, WorkspaceMember, - State, - Estimate, ) from .base import BaseSerializer diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py index 593e501bf98..0a775454bc5 100644 --- a/apiserver/plane/api/urls/cycle.py +++ b/apiserver/plane/api/urls/cycle.py @@ -4,6 +4,7 @@ CycleAPIEndpoint, CycleIssueAPIEndpoint, TransferCycleIssueAPIEndpoint, + 
CycleArchiveUnarchiveAPIEndpoint, ) urlpatterns = [ @@ -32,4 +33,14 @@ TransferCycleIssueAPIEndpoint.as_view(), name="transfer-issues", ), + path( + "workspaces//projects//cycles//archive/", + CycleArchiveUnarchiveAPIEndpoint.as_view(), + name="cycle-archive-unarchive", + ), + path( + "workspaces//projects//archived-cycles/", + CycleArchiveUnarchiveAPIEndpoint.as_view(), + name="cycle-archive-unarchive", + ), ] diff --git a/apiserver/plane/api/urls/module.py b/apiserver/plane/api/urls/module.py index 4309f44e968..a131f4d4f92 100644 --- a/apiserver/plane/api/urls/module.py +++ b/apiserver/plane/api/urls/module.py @@ -1,6 +1,10 @@ from django.urls import path -from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint +from plane.api.views import ( + ModuleAPIEndpoint, + ModuleIssueAPIEndpoint, + ModuleArchiveUnarchiveAPIEndpoint, +) urlpatterns = [ path( @@ -23,4 +27,14 @@ ModuleIssueAPIEndpoint.as_view(), name="module-issues", ), + path( + "workspaces//projects//modules//archive/", + ModuleArchiveUnarchiveAPIEndpoint.as_view(), + name="module-archive-unarchive", + ), + path( + "workspaces//projects//archived-modules/", + ModuleArchiveUnarchiveAPIEndpoint.as_view(), + name="module-archive-unarchive", + ), ] diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py index 1ed450c8614..490371ccab1 100644 --- a/apiserver/plane/api/urls/project.py +++ b/apiserver/plane/api/urls/project.py @@ -1,6 +1,9 @@ from django.urls import path -from plane.api.views import ProjectAPIEndpoint +from plane.api.views import ( + ProjectAPIEndpoint, + ProjectArchiveUnarchiveAPIEndpoint, +) urlpatterns = [ path( @@ -13,4 +16,9 @@ ProjectAPIEndpoint.as_view(), name="project", ), + path( + "workspaces//projects//archive/", + ProjectArchiveUnarchiveAPIEndpoint.as_view(), + name="project-archive-unarchive", + ), ] diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py index 0da79566f45..574ec69b6a9 100644 --- 
a/apiserver/plane/api/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -1,4 +1,4 @@ -from .project import ProjectAPIEndpoint +from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint from .state import StateAPIEndpoint @@ -14,8 +14,13 @@ CycleAPIEndpoint, CycleIssueAPIEndpoint, TransferCycleIssueAPIEndpoint, + CycleArchiveUnarchiveAPIEndpoint, ) -from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint +from .module import ( + ModuleAPIEndpoint, + ModuleIssueAPIEndpoint, + ModuleArchiveUnarchiveAPIEndpoint, +) from .inbox import InboxIssueAPIEndpoint diff --git a/apiserver/plane/api/views/base.py b/apiserver/plane/api/views/base.py index edb89f9b187..0cf5e8731c8 100644 --- a/apiserver/plane/api/views/base.py +++ b/apiserver/plane/api/views/base.py @@ -1,27 +1,26 @@ # Python imports -import zoneinfo -import json from urllib.parse import urlparse +import zoneinfo # Django imports from django.conf import settings -from django.db import IntegrityError from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.db import IntegrityError from django.utils import timezone +from rest_framework import status +from rest_framework.permissions import IsAuthenticated +from rest_framework.response import Response # Third party imports from rest_framework.views import APIView -from rest_framework.response import Response -from rest_framework.permissions import IsAuthenticated -from rest_framework import status -from sentry_sdk import capture_exception # Module imports from plane.api.middleware.api_authentication import APIKeyAuthentication from plane.api.rate_limit import ApiKeyRateThrottle -from plane.utils.paginator import BasePaginator from plane.bgtasks.webhook_task import send_webhook +from plane.utils.exception_logger import log_exception +from plane.utils.paginator import BasePaginator class TimezoneMixin: @@ -107,27 +106,23 @@ def handle_exception(self, exc): if isinstance(e, ValidationError): return Response( 
- { - "error": "The provided payload is not valid please try with a valid payload" - }, + {"error": "Please provide valid detail"}, status=status.HTTP_400_BAD_REQUEST, ) if isinstance(e, ObjectDoesNotExist): return Response( - {"error": f"The required object does not exist."}, + {"error": "The requested resource does not exist."}, status=status.HTTP_404_NOT_FOUND, ) if isinstance(e, KeyError): return Response( - {"error": f" The required key does not exist."}, + {"error": "The required key does not exist."}, status=status.HTTP_400_BAD_REQUEST, ) - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return Response( {"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR, diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py index 84931f46be9..637d713c319 100644 --- a/apiserver/plane/api/views/cycle.py +++ b/apiserver/plane/api/views/cycle.py @@ -2,7 +2,7 @@ import json # Django imports -from django.db.models import Q, Count, Sum, Prefetch, F, OuterRef, Func +from django.db.models import Q, Count, Sum, F, OuterRef, Func from django.utils import timezone from django.core import serializers @@ -140,7 +140,9 @@ def get_queryset(self): def get(self, request, slug, project_id, pk=None): if pk: - queryset = self.get_queryset().get(pk=pk) + queryset = ( + self.get_queryset().filter(archived_at__isnull=True).get(pk=pk) + ) data = CycleSerializer( queryset, fields=self.fields, @@ -150,7 +152,9 @@ def get(self, request, slug, project_id, pk=None): data, status=status.HTTP_200_OK, ) - queryset = self.get_queryset() + queryset = ( + self.get_queryset().filter(archived_at__isnull=True) + ) cycle_view = request.GET.get("cycle_view", "all") # Current Cycle @@ -291,6 +295,11 @@ def patch(self, request, slug, project_id, pk): cycle = Cycle.objects.get( workspace__slug=slug, project_id=project_id, pk=pk ) + if cycle.archived_at: + return Response( + {"error": "Archived cycle cannot be 
edited"}, + status=status.HTTP_400_BAD_REQUEST, + ) request_data = request.data @@ -321,7 +330,9 @@ def patch(self, request, slug, project_id, pk): and Cycle.objects.filter( project_id=project_id, workspace__slug=slug, - external_source=request.data.get("external_source", cycle.external_source), + external_source=request.data.get( + "external_source", cycle.external_source + ), external_id=request.data.get("external_id"), ).exists() ): @@ -366,6 +377,139 @@ def delete(self, request, slug, project_id, pk): return Response(status=status.HTTP_204_NO_CONTENT) +class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView): + + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return ( + Cycle.objects.filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(archived_at__isnull=False) + .select_related("project") + .select_related("workspace") + .select_related("owned_by") + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + unstarted_issues=Count( + 
"issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="unstarted", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="backlog", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + total_estimates=Sum("issue_cycle__issue__estimate_point") + ) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + .distinct() + ) + + def get(self, request, slug, project_id): + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda cycles: CycleSerializer( + cycles, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, pk): + cycle = Cycle.objects.get( + pk=pk, project_id=project_id, workspace__slug=slug + ) + cycle.archived_at = timezone.now() + cycle.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + def delete(self, request, slug, project_id, pk): + cycle = Cycle.objects.get( + pk=pk, project_id=project_id, workspace__slug=slug + ) + cycle.archived_at = None + cycle.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView): """ This viewset automatically provides `list`, `create`, diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py index 
c1079345ac2..fb36ea2a940 100644 --- a/apiserver/plane/api/views/inbox.py +++ b/apiserver/plane/api/views/inbox.py @@ -119,7 +119,7 @@ def post(self, request, slug, project_id): ) # Check for valid priority - if not request.data.get("issue", {}).get("priority", "none") in [ + if request.data.get("issue", {}).get("priority", "none") not in [ "low", "medium", "high", diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index bf3313779c2..4b59dc02076 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -1,22 +1,22 @@ # Python imports import json -from itertools import chain + +from django.core.serializers.json import DjangoJSONEncoder # Django imports from django.db import IntegrityError from django.db.models import ( - OuterRef, - Func, - Q, - F, Case, - When, - Value, CharField, - Max, Exists, + F, + Func, + Max, + OuterRef, + Q, + Value, + When, ) -from django.core.serializers.json import DjangoJSONEncoder from django.utils import timezone # Third party imports @@ -24,31 +24,32 @@ from rest_framework.response import Response # Module imports -from .base import BaseAPIView, WebhookMixin +from plane.api.serializers import ( + IssueActivitySerializer, + IssueCommentSerializer, + IssueLinkSerializer, + IssueSerializer, + LabelSerializer, +) from plane.app.permissions import ( ProjectEntityPermission, - ProjectMemberPermission, ProjectLitePermission, + ProjectMemberPermission, ) +from plane.bgtasks.issue_activites_task import issue_activity from plane.db.models import ( Issue, + IssueActivity, IssueAttachment, + IssueComment, IssueLink, - Project, Label, + Project, ProjectMember, - IssueComment, - IssueActivity, -) -from plane.bgtasks.issue_activites_task import issue_activity -from plane.api.serializers import ( - IssueSerializer, - LabelSerializer, - IssueLinkSerializer, - IssueCommentSerializer, - IssueActivitySerializer, ) +from .base import BaseAPIView, WebhookMixin + class 
IssueAPIEndpoint(WebhookMixin, BaseAPIView): """ @@ -356,6 +357,7 @@ def get_queryset(self): project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) + .filter(project__archived_at__isnull=True) .select_related("project") .select_related("workspace") .select_related("parent") @@ -488,6 +490,7 @@ def get_queryset(self): project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) + .filter(project__archived_at__isnull=True) .order_by(self.kwargs.get("order_by", "-created_at")) .distinct() ) @@ -617,6 +620,7 @@ def get_queryset(self): project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) + .filter(project__archived_at__isnull=True) .select_related("workspace", "project", "issue", "actor") .annotate( is_member=Exists( @@ -653,7 +657,6 @@ def get(self, request, slug, project_id, issue_id, pk=None): ) def post(self, request, slug, project_id, issue_id): - # Validation check if the issue already exists if ( request.data.get("external_id") @@ -679,7 +682,6 @@ def post(self, request, slug, project_id, issue_id): status=status.HTTP_409_CONFLICT, ) - serializer = IssueCommentSerializer(data=request.data) if serializer.is_valid(): serializer.save( @@ -717,7 +719,10 @@ def patch(self, request, slug, project_id, issue_id, pk): # Validation check if the issue already exists if ( request.data.get("external_id") - and (issue_comment.external_id != str(request.data.get("external_id"))) + and ( + issue_comment.external_id + != str(request.data.get("external_id")) + ) and IssueComment.objects.filter( project_id=project_id, workspace__slug=slug, @@ -735,7 +740,6 @@ def patch(self, request, slug, project_id, issue_id, pk): status=status.HTTP_409_CONFLICT, ) - serializer = IssueCommentSerializer( issue_comment, data=request.data, partial=True ) @@ -792,6 +796,7 @@ def get(self, request, slug, project_id, issue_id, pk=None): 
project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) + .filter(project__archived_at__isnull=True) .select_related("actor", "workspace", "issue", "project") ).order_by(request.GET.get("order_by", "created_at")) diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py index 2e5bb85e2b7..643221dcabe 100644 --- a/apiserver/plane/api/views/module.py +++ b/apiserver/plane/api/views/module.py @@ -67,6 +67,7 @@ def get_queryset(self): issue_module__issue__archived_at__isnull=True, issue_module__issue__is_draft=False, ), + distinct=True, ), ) .annotate( @@ -77,6 +78,7 @@ def get_queryset(self): issue_module__issue__archived_at__isnull=True, issue_module__issue__is_draft=False, ), + distinct=True, ) ) .annotate( @@ -87,6 +89,7 @@ def get_queryset(self): issue_module__issue__archived_at__isnull=True, issue_module__issue__is_draft=False, ), + distinct=True, ) ) .annotate( @@ -97,6 +100,7 @@ def get_queryset(self): issue_module__issue__archived_at__isnull=True, issue_module__issue__is_draft=False, ), + distinct=True, ) ) .annotate( @@ -107,6 +111,7 @@ def get_queryset(self): issue_module__issue__archived_at__isnull=True, issue_module__issue__is_draft=False, ), + distinct=True, ) ) .annotate( @@ -117,6 +122,7 @@ def get_queryset(self): issue_module__issue__archived_at__isnull=True, issue_module__issue__is_draft=False, ), + distinct=True, ) ) .order_by(self.kwargs.get("order_by", "-created_at")) @@ -165,6 +171,11 @@ def patch(self, request, slug, project_id, pk): module = Module.objects.get( pk=pk, project_id=project_id, workspace__slug=slug ) + if module.archived_at: + return Response( + {"error": "Archived module cannot be edited"}, + status=status.HTTP_400_BAD_REQUEST, + ) serializer = ModuleSerializer( module, data=request.data, @@ -178,7 +189,9 @@ def patch(self, request, slug, project_id, pk): and Module.objects.filter( project_id=project_id, workspace__slug=slug, - 
external_source=request.data.get("external_source", module.external_source), + external_source=request.data.get( + "external_source", module.external_source + ), external_id=request.data.get("external_id"), ).exists() ): @@ -195,7 +208,9 @@ def patch(self, request, slug, project_id, pk): def get(self, request, slug, project_id, pk=None): if pk: - queryset = self.get_queryset().get(pk=pk) + queryset = ( + self.get_queryset().filter(archived_at__isnull=True).get(pk=pk) + ) data = ModuleSerializer( queryset, fields=self.fields, @@ -207,7 +222,7 @@ def get(self, request, slug, project_id, pk=None): ) return self.paginate( request=request, - queryset=(self.get_queryset()), + queryset=(self.get_queryset().filter(archived_at__isnull=True)), on_results=lambda modules: ModuleSerializer( modules, many=True, @@ -277,6 +292,7 @@ def get_queryset(self): project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) + .filter(project__archived_at__isnull=True) .select_related("project") .select_related("workspace") .select_related("module") @@ -444,3 +460,123 @@ def delete(self, request, slug, project_id, module_id, issue_id): epoch=int(timezone.now().timestamp()), ) return Response(status=status.HTTP_204_NO_CONTENT) + + +class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView): + + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return ( + Module.objects.filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(archived_at__isnull=False) + .select_related("project") + .select_related("workspace") + .select_related("lead") + .prefetch_related("members") + .prefetch_related( + Prefetch( + "link_module", + queryset=ModuleLink.objects.select_related( + "module", "created_by" + ), + ) + ) + .annotate( + total_issues=Count( + "issue_module", + filter=Q( + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + 
distinct=True, + ), + ) + .annotate( + completed_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="completed", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + cancelled_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="cancelled", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + started_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="started", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + unstarted_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="unstarted", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + backlog_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="backlog", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + ) + + def get(self, request, slug, project_id): + return self.paginate( + request=request, + queryset=(self.get_queryset()), + on_results=lambda modules: ModuleSerializer( + modules, + many=True, + fields=self.fields, + expand=self.expand, + ).data, + ) + + def post(self, request, slug, project_id, pk): + module = Module.objects.get( + pk=pk, project_id=project_id, workspace__slug=slug + ) + module.archived_at = timezone.now() + module.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + def delete(self, request, slug, project_id, pk): + module = Module.objects.get( + pk=pk, project_id=project_id, workspace__slug=slug + ) 
+ module.archived_at = None + module.save() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index cb1f7dc7bc0..e0bce5514a2 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -1,4 +1,5 @@ # Django imports +from django.utils import timezone from django.db import IntegrityError from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch @@ -11,7 +12,6 @@ from plane.db.models import ( Workspace, Project, - ProjectFavorite, ProjectMember, ProjectDeployBoard, State, @@ -40,7 +40,10 @@ def get_queryset(self): return ( Project.objects.filter(workspace__slug=self.kwargs.get("slug")) .filter( - Q(project_projectmember__member=self.request.user) + Q( + project_projectmember__member=self.request.user, + project_projectmember__is_active=True, + ) | Q(network=2) ) .select_related( @@ -150,7 +153,7 @@ def post(self, request, slug): serializer.save() # Add the user as Administrator to the project - project_member = ProjectMember.objects.create( + _ = ProjectMember.objects.create( project_id=serializer.data["id"], member=request.user, role=20, @@ -245,12 +248,12 @@ def post(self, request, slug): {"name": "The project name is already taken"}, status=status.HTTP_410_GONE, ) - except Workspace.DoesNotExist as e: + except Workspace.DoesNotExist: return Response( {"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND, ) - except ValidationError as e: + except ValidationError: return Response( {"identifier": "The project identifier is already taken"}, status=status.HTTP_410_GONE, @@ -261,6 +264,12 @@ def patch(self, request, slug, project_id=None): workspace = Workspace.objects.get(slug=slug) project = Project.objects.get(pk=project_id) + if project.archived_at: + return Response( + {"error": "Archived project cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) + serializer = ProjectSerializer( project, 
data={**request.data}, @@ -307,7 +316,7 @@ def patch(self, request, slug, project_id=None): {"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND, ) - except ValidationError as e: + except ValidationError: return Response( {"identifier": "The project identifier is already taken"}, status=status.HTTP_410_GONE, @@ -317,3 +326,22 @@ def delete(self, request, slug, project_id): project = Project.objects.get(pk=project_id, workspace__slug=slug) project.delete() return Response(status=status.HTTP_204_NO_CONTENT) + + +class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView): + + permission_classes = [ + ProjectBasePermission, + ] + + def post(self, request, slug, project_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + project.archived_at = timezone.now() + project.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + def delete(self, request, slug, project_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + project.archived_at = None + project.save() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py index ec10f9babe1..4ee899831fb 100644 --- a/apiserver/plane/api/views/state.py +++ b/apiserver/plane/api/views/state.py @@ -28,6 +28,7 @@ def get_queryset(self): project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) + .filter(project__archived_at__isnull=True) .filter(~Q(name="Triage")) .select_related("project") .select_related("workspace") @@ -66,8 +67,10 @@ def post(self, request, slug, project_id): serializer.save(project_id=project_id) return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - except IntegrityError as e: + return Response( + serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + except IntegrityError: state = State.objects.filter( workspace__slug=slug, 
project_id=project_id, @@ -136,7 +139,9 @@ def patch(self, request, slug, project_id, state_id=None): and State.objects.filter( project_id=project_id, workspace__slug=slug, - external_source=request.data.get("external_source", state.external_source), + external_source=request.data.get( + "external_source", state.external_source + ), external_id=request.data.get("external_id"), ).exists() ): diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py index 9bdd4baaf9d..22673dabceb 100644 --- a/apiserver/plane/app/serializers/__init__.py +++ b/apiserver/plane/app/serializers/__init__.py @@ -86,16 +86,6 @@ from .api import APITokenSerializer, APITokenReadSerializer -from .integration import ( - IntegrationSerializer, - WorkspaceIntegrationSerializer, - GithubIssueSyncSerializer, - GithubRepositorySerializer, - GithubRepositorySyncSerializer, - GithubCommentSyncSerializer, - SlackProjectSyncSerializer, -) - from .importer import ImporterSerializer from .page import ( @@ -121,7 +111,10 @@ from .analytic import AnalyticViewSerializer -from .notification import NotificationSerializer, UserNotificationPreferenceSerializer +from .notification import ( + NotificationSerializer, + UserNotificationPreferenceSerializer, +) from .exporter import ExporterHistorySerializer diff --git a/apiserver/plane/app/serializers/cycle.py b/apiserver/plane/app/serializers/cycle.py index a273b349c3d..13d321780d8 100644 --- a/apiserver/plane/app/serializers/cycle.py +++ b/apiserver/plane/app/serializers/cycle.py @@ -11,6 +11,7 @@ CycleUserProperties, ) + class CycleWriteSerializer(BaseSerializer): def validate(self, data): if ( @@ -30,6 +31,7 @@ class Meta: "workspace", "project", "owned_by", + "archived_at", ] @@ -47,7 +49,6 @@ class CycleSerializer(BaseSerializer): # active | draft | upcoming | completed status = serializers.CharField(read_only=True) - class Meta: model = Cycle fields = [ diff --git a/apiserver/plane/app/serializers/dashboard.py 
b/apiserver/plane/app/serializers/dashboard.py index 8fca3c9064b..b0ed8841beb 100644 --- a/apiserver/plane/app/serializers/dashboard.py +++ b/apiserver/plane/app/serializers/dashboard.py @@ -18,9 +18,4 @@ class WidgetSerializer(BaseSerializer): class Meta: model = Widget - fields = [ - "id", - "key", - "is_visible", - "widget_filters" - ] \ No newline at end of file + fields = ["id", "key", "is_visible", "widget_filters"] diff --git a/apiserver/plane/app/serializers/estimate.py b/apiserver/plane/app/serializers/estimate.py index 6753900803e..d28f38c75ab 100644 --- a/apiserver/plane/app/serializers/estimate.py +++ b/apiserver/plane/app/serializers/estimate.py @@ -74,5 +74,3 @@ class Meta: "name", "description", ] - - diff --git a/apiserver/plane/app/serializers/integration/__init__.py b/apiserver/plane/app/serializers/integration/__init__.py deleted file mode 100644 index 112ff02d162..00000000000 --- a/apiserver/plane/app/serializers/integration/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from .base import IntegrationSerializer, WorkspaceIntegrationSerializer -from .github import ( - GithubRepositorySerializer, - GithubRepositorySyncSerializer, - GithubIssueSyncSerializer, - GithubCommentSyncSerializer, -) -from .slack import SlackProjectSyncSerializer diff --git a/apiserver/plane/app/serializers/integration/base.py b/apiserver/plane/app/serializers/integration/base.py deleted file mode 100644 index 01e484ed027..00000000000 --- a/apiserver/plane/app/serializers/integration/base.py +++ /dev/null @@ -1,22 +0,0 @@ -# Module imports -from plane.app.serializers import BaseSerializer -from plane.db.models import Integration, WorkspaceIntegration - - -class IntegrationSerializer(BaseSerializer): - class Meta: - model = Integration - fields = "__all__" - read_only_fields = [ - "verified", - ] - - -class WorkspaceIntegrationSerializer(BaseSerializer): - integration_detail = IntegrationSerializer( - read_only=True, source="integration" - ) - - class Meta: - model = 
WorkspaceIntegration - fields = "__all__" diff --git a/apiserver/plane/app/serializers/integration/github.py b/apiserver/plane/app/serializers/integration/github.py deleted file mode 100644 index 850bccf1b3a..00000000000 --- a/apiserver/plane/app/serializers/integration/github.py +++ /dev/null @@ -1,45 +0,0 @@ -# Module imports -from plane.app.serializers import BaseSerializer -from plane.db.models import ( - GithubIssueSync, - GithubRepository, - GithubRepositorySync, - GithubCommentSync, -) - - -class GithubRepositorySerializer(BaseSerializer): - class Meta: - model = GithubRepository - fields = "__all__" - - -class GithubRepositorySyncSerializer(BaseSerializer): - repo_detail = GithubRepositorySerializer(source="repository") - - class Meta: - model = GithubRepositorySync - fields = "__all__" - - -class GithubIssueSyncSerializer(BaseSerializer): - class Meta: - model = GithubIssueSync - fields = "__all__" - read_only_fields = [ - "project", - "workspace", - "repository_sync", - ] - - -class GithubCommentSyncSerializer(BaseSerializer): - class Meta: - model = GithubCommentSync - fields = "__all__" - read_only_fields = [ - "project", - "workspace", - "repository_sync", - "issue_sync", - ] diff --git a/apiserver/plane/app/serializers/integration/slack.py b/apiserver/plane/app/serializers/integration/slack.py deleted file mode 100644 index 9c461c5b9b5..00000000000 --- a/apiserver/plane/app/serializers/integration/slack.py +++ /dev/null @@ -1,14 +0,0 @@ -# Module imports -from plane.app.serializers import BaseSerializer -from plane.db.models import SlackProjectSync - - -class SlackProjectSyncSerializer(BaseSerializer): - class Meta: - model = SlackProjectSync - fields = "__all__" - read_only_fields = [ - "project", - "workspace", - "workspace_integration", - ] diff --git a/apiserver/plane/app/serializers/issue.py b/apiserver/plane/app/serializers/issue.py index 411c5b73f88..fc0e6f838ce 100644 --- a/apiserver/plane/app/serializers/issue.py +++ 
b/apiserver/plane/app/serializers/issue.py @@ -1,5 +1,7 @@ # Django imports from django.utils import timezone +from django.core.validators import URLValidator +from django.core.exceptions import ValidationError # Third Party imports from rest_framework import serializers @@ -7,7 +9,7 @@ # Module imports from .base import BaseSerializer, DynamicBaseSerializer from .user import UserLiteSerializer -from .state import StateSerializer, StateLiteSerializer +from .state import StateLiteSerializer from .project import ProjectLiteSerializer from .workspace import WorkspaceLiteSerializer from plane.db.models import ( @@ -31,7 +33,6 @@ IssueVote, IssueRelation, State, - Project, ) @@ -432,6 +433,20 @@ class Meta: "issue", ] + def validate_url(self, value): + # Check URL format + validate_url = URLValidator() + try: + validate_url(value) + except ValidationError: + raise serializers.ValidationError("Invalid URL format.") + + # Check URL scheme + if not value.startswith(('http://', 'https://')): + raise serializers.ValidationError("Invalid URL scheme.") + + return value + # Validation if url already exists def create(self, validated_data): if IssueLink.objects.filter( @@ -443,9 +458,19 @@ def create(self, validated_data): ) return IssueLink.objects.create(**validated_data) + def update(self, instance, validated_data): + if IssueLink.objects.filter( + url=validated_data.get("url"), + issue_id=instance.issue_id, + ).exists(): + raise serializers.ValidationError( + {"error": "URL already exists for this Issue"} + ) + + return super().update(instance, validated_data) -class IssueLinkLiteSerializer(BaseSerializer): +class IssueLinkLiteSerializer(BaseSerializer): class Meta: model = IssueLink fields = [ @@ -476,7 +501,6 @@ class Meta: class IssueAttachmentLiteSerializer(DynamicBaseSerializer): - class Meta: model = IssueAttachment fields = [ @@ -505,13 +529,12 @@ class Meta: class IssueReactionLiteSerializer(DynamicBaseSerializer): - class Meta: model = IssueReaction fields = [ "id", 
- "actor_id", - "issue_id", + "actor", + "issue", "reaction", ] @@ -601,15 +624,18 @@ class IssueSerializer(DynamicBaseSerializer): # ids cycle_id = serializers.PrimaryKeyRelatedField(read_only=True) module_ids = serializers.ListField( - child=serializers.UUIDField(), required=False, + child=serializers.UUIDField(), + required=False, ) # Many to many label_ids = serializers.ListField( - child=serializers.UUIDField(), required=False, + child=serializers.UUIDField(), + required=False, ) assignee_ids = serializers.ListField( - child=serializers.UUIDField(), required=False, + child=serializers.UUIDField(), + required=False, ) # Count items @@ -649,19 +675,7 @@ class Meta: read_only_fields = fields -class IssueDetailSerializer(IssueSerializer): - description_html = serializers.CharField() - is_subscribed = serializers.BooleanField(read_only=True) - - class Meta(IssueSerializer.Meta): - fields = IssueSerializer.Meta.fields + [ - "description_html", - "is_subscribed", - ] - - class IssueLiteSerializer(DynamicBaseSerializer): - class Meta: model = Issue fields = [ diff --git a/apiserver/plane/app/serializers/module.py b/apiserver/plane/app/serializers/module.py index 4aabfc50efd..dfdd265cd92 100644 --- a/apiserver/plane/app/serializers/module.py +++ b/apiserver/plane/app/serializers/module.py @@ -3,7 +3,6 @@ # Module imports from .base import BaseSerializer, DynamicBaseSerializer -from .user import UserLiteSerializer from .project import ProjectLiteSerializer from plane.db.models import ( @@ -40,6 +39,7 @@ class Meta: "updated_by", "created_at", "updated_at", + "archived_at", ] def to_representation(self, instance): @@ -142,7 +142,6 @@ class Meta: class ModuleLinkSerializer(BaseSerializer): - class Meta: model = ModuleLink fields = "__all__" @@ -215,13 +214,12 @@ class Meta: read_only_fields = fields - class ModuleDetailSerializer(ModuleSerializer): - link_module = ModuleLinkSerializer(read_only=True, many=True) + sub_issues = serializers.IntegerField(read_only=True) class 
Meta(ModuleSerializer.Meta): - fields = ModuleSerializer.Meta.fields + ['link_module'] + fields = ModuleSerializer.Meta.fields + ["link_module", "sub_issues"] class ModuleFavoriteSerializer(BaseSerializer): diff --git a/apiserver/plane/app/serializers/notification.py b/apiserver/plane/app/serializers/notification.py index 2152fcf0f9c..c6713a3540d 100644 --- a/apiserver/plane/app/serializers/notification.py +++ b/apiserver/plane/app/serializers/notification.py @@ -15,7 +15,6 @@ class Meta: class UserNotificationPreferenceSerializer(BaseSerializer): - class Meta: model = UserNotificationPreference fields = "__all__" diff --git a/apiserver/plane/app/serializers/page.py b/apiserver/plane/app/serializers/page.py index a0f5986d69f..4dfe6ea9d65 100644 --- a/apiserver/plane/app/serializers/page.py +++ b/apiserver/plane/app/serializers/page.py @@ -3,7 +3,7 @@ # Module imports from .base import BaseSerializer -from .issue import IssueFlatSerializer, LabelLiteSerializer +from .issue import LabelLiteSerializer from .workspace import WorkspaceLiteSerializer from .project import ProjectLiteSerializer from plane.db.models import ( @@ -12,8 +12,6 @@ PageFavorite, PageLabel, Label, - Issue, - Module, ) diff --git a/apiserver/plane/app/serializers/project.py b/apiserver/plane/app/serializers/project.py index 999233442a4..a0c2318e381 100644 --- a/apiserver/plane/app/serializers/project.py +++ b/apiserver/plane/app/serializers/project.py @@ -95,14 +95,19 @@ class Meta: "identifier", "name", "cover_image", - "icon_prop", - "emoji", + "logo_props", "description", ] read_only_fields = fields class ProjectListSerializer(DynamicBaseSerializer): + total_issues = serializers.IntegerField(read_only=True) + archived_issues = serializers.IntegerField(read_only=True) + archived_sub_issues = serializers.IntegerField(read_only=True) + draft_issues = serializers.IntegerField(read_only=True) + draft_sub_issues = serializers.IntegerField(read_only=True) + sub_issues = 
serializers.IntegerField(read_only=True) is_favorite = serializers.BooleanField(read_only=True) total_members = serializers.IntegerField(read_only=True) total_cycles = serializers.IntegerField(read_only=True) diff --git a/apiserver/plane/app/serializers/user.py b/apiserver/plane/app/serializers/user.py index 8cd48827e13..d6c15ee7fa9 100644 --- a/apiserver/plane/app/serializers/user.py +++ b/apiserver/plane/app/serializers/user.py @@ -4,7 +4,6 @@ # Module import from .base import BaseSerializer from plane.db.models import User, Workspace, WorkspaceMemberInvite -from plane.license.models import InstanceAdmin, Instance class UserSerializer(BaseSerializer): @@ -99,13 +98,13 @@ def get_workspace(self, obj): ).first() return { "last_workspace_id": obj.last_workspace_id, - "last_workspace_slug": workspace.slug - if workspace is not None - else "", + "last_workspace_slug": ( + workspace.slug if workspace is not None else "" + ), "fallback_workspace_id": obj.last_workspace_id, - "fallback_workspace_slug": workspace.slug - if workspace is not None - else "", + "fallback_workspace_slug": ( + workspace.slug if workspace is not None else "" + ), "invites": workspace_invites, } else: @@ -120,12 +119,16 @@ def get_workspace(self, obj): return { "last_workspace_id": None, "last_workspace_slug": None, - "fallback_workspace_id": fallback_workspace.id - if fallback_workspace is not None - else None, - "fallback_workspace_slug": fallback_workspace.slug - if fallback_workspace is not None - else None, + "fallback_workspace_id": ( + fallback_workspace.id + if fallback_workspace is not None + else None + ), + "fallback_workspace_slug": ( + fallback_workspace.slug + if fallback_workspace is not None + else None + ), "invites": workspace_invites, } diff --git a/apiserver/plane/app/serializers/webhook.py b/apiserver/plane/app/serializers/webhook.py index 95ca149ffa4..175dea3047d 100644 --- a/apiserver/plane/app/serializers/webhook.py +++ b/apiserver/plane/app/serializers/webhook.py @@ -1,5 
+1,4 @@ # Python imports -import urllib import socket import ipaddress from urllib.parse import urlparse diff --git a/apiserver/plane/app/urls/__init__.py b/apiserver/plane/app/urls/__init__.py index f2b11f12761..40b96687d33 100644 --- a/apiserver/plane/app/urls/__init__.py +++ b/apiserver/plane/app/urls/__init__.py @@ -6,9 +6,7 @@ from .dashboard import urlpatterns as dashboard_urls from .estimate import urlpatterns as estimate_urls from .external import urlpatterns as external_urls -from .importer import urlpatterns as importer_urls from .inbox import urlpatterns as inbox_urls -from .integration import urlpatterns as integration_urls from .issue import urlpatterns as issue_urls from .module import urlpatterns as module_urls from .notification import urlpatterns as notification_urls @@ -32,9 +30,7 @@ *dashboard_urls, *estimate_urls, *external_urls, - *importer_urls, *inbox_urls, - *integration_urls, *issue_urls, *module_urls, *notification_urls, diff --git a/apiserver/plane/app/urls/cycle.py b/apiserver/plane/app/urls/cycle.py index 740b0ab4386..2e1779420da 100644 --- a/apiserver/plane/app/urls/cycle.py +++ b/apiserver/plane/app/urls/cycle.py @@ -8,6 +8,7 @@ CycleFavoriteViewSet, TransferCycleIssueEndpoint, CycleUserPropertiesEndpoint, + CycleArchiveUnarchiveEndpoint, ) @@ -90,4 +91,14 @@ CycleUserPropertiesEndpoint.as_view(), name="cycle-user-filters", ), + path( + "workspaces//projects//cycles//archive/", + CycleArchiveUnarchiveEndpoint.as_view(), + name="cycle-archive-unarchive", + ), + path( + "workspaces//projects//archived-cycles/", + CycleArchiveUnarchiveEndpoint.as_view(), + name="cycle-archive-unarchive", + ), ] diff --git a/apiserver/plane/app/urls/external.py b/apiserver/plane/app/urls/external.py index 774e6fb7cd3..8db87a24928 100644 --- a/apiserver/plane/app/urls/external.py +++ b/apiserver/plane/app/urls/external.py @@ -2,7 +2,6 @@ from plane.app.views import UnsplashEndpoint -from plane.app.views import ReleaseNotesEndpoint from plane.app.views 
import GPTIntegrationEndpoint @@ -12,11 +11,6 @@ UnsplashEndpoint.as_view(), name="unsplash", ), - path( - "release-notes/", - ReleaseNotesEndpoint.as_view(), - name="release-notes", - ), path( "workspaces//projects//ai-assistant/", GPTIntegrationEndpoint.as_view(), diff --git a/apiserver/plane/app/urls/importer.py b/apiserver/plane/app/urls/importer.py deleted file mode 100644 index f3a018d7894..00000000000 --- a/apiserver/plane/app/urls/importer.py +++ /dev/null @@ -1,37 +0,0 @@ -from django.urls import path - - -from plane.app.views import ( - ServiceIssueImportSummaryEndpoint, - ImportServiceEndpoint, - UpdateServiceImportStatusEndpoint, -) - - -urlpatterns = [ - path( - "workspaces//importers//", - ServiceIssueImportSummaryEndpoint.as_view(), - name="importer-summary", - ), - path( - "workspaces//projects/importers//", - ImportServiceEndpoint.as_view(), - name="importer", - ), - path( - "workspaces//importers/", - ImportServiceEndpoint.as_view(), - name="importer", - ), - path( - "workspaces//importers///", - ImportServiceEndpoint.as_view(), - name="importer", - ), - path( - "workspaces//projects//service//importers//", - UpdateServiceImportStatusEndpoint.as_view(), - name="importer-status", - ), -] diff --git a/apiserver/plane/app/urls/integration.py b/apiserver/plane/app/urls/integration.py deleted file mode 100644 index cf3f82d5a49..00000000000 --- a/apiserver/plane/app/urls/integration.py +++ /dev/null @@ -1,150 +0,0 @@ -from django.urls import path - - -from plane.app.views import ( - IntegrationViewSet, - WorkspaceIntegrationViewSet, - GithubRepositoriesEndpoint, - GithubRepositorySyncViewSet, - GithubIssueSyncViewSet, - GithubCommentSyncViewSet, - BulkCreateGithubIssueSyncEndpoint, - SlackProjectSyncViewSet, -) - - -urlpatterns = [ - path( - "integrations/", - IntegrationViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="integrations", - ), - path( - "integrations//", - IntegrationViewSet.as_view( - { - "get": "retrieve", - 
"patch": "partial_update", - "delete": "destroy", - } - ), - name="integrations", - ), - path( - "workspaces//workspace-integrations/", - WorkspaceIntegrationViewSet.as_view( - { - "get": "list", - } - ), - name="workspace-integrations", - ), - path( - "workspaces//workspace-integrations//", - WorkspaceIntegrationViewSet.as_view( - { - "post": "create", - } - ), - name="workspace-integrations", - ), - path( - "workspaces//workspace-integrations//provider/", - WorkspaceIntegrationViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - name="workspace-integrations", - ), - # Github Integrations - path( - "workspaces//workspace-integrations//github-repositories/", - GithubRepositoriesEndpoint.as_view(), - ), - path( - "workspaces//projects//workspace-integrations//github-repository-sync/", - GithubRepositorySyncViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - ), - path( - "workspaces//projects//workspace-integrations//github-repository-sync//", - GithubRepositorySyncViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync/", - GithubIssueSyncViewSet.as_view( - { - "post": "create", - "get": "list", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/", - BulkCreateGithubIssueSyncEndpoint.as_view(), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync//", - GithubIssueSyncViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/", - GithubCommentSyncViewSet.as_view( - { - "post": "create", - "get": "list", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//", - GithubCommentSyncViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - ), - ## End Github Integrations - 
# Slack Integration - path( - "workspaces//projects//workspace-integrations//project-slack-sync/", - SlackProjectSyncViewSet.as_view( - { - "post": "create", - "get": "list", - } - ), - ), - path( - "workspaces//projects//workspace-integrations//project-slack-sync//", - SlackProjectSyncViewSet.as_view( - { - "delete": "destroy", - "get": "retrieve", - } - ), - ), - ## End Slack Integration -] diff --git a/apiserver/plane/app/urls/issue.py b/apiserver/plane/app/urls/issue.py index 4ee70450b37..0d3b9e0634c 100644 --- a/apiserver/plane/app/urls/issue.py +++ b/apiserver/plane/app/urls/issue.py @@ -1,30 +1,26 @@ from django.urls import path - from plane.app.views import ( - IssueListEndpoint, - IssueViewSet, - LabelViewSet, BulkCreateIssueLabelsEndpoint, BulkDeleteIssuesEndpoint, - BulkImportIssuesEndpoint, - UserWorkSpaceIssues, SubIssuesEndpoint, IssueLinkViewSet, IssueAttachmentEndpoint, + CommentReactionViewSet, ExportIssuesEndpoint, IssueActivityEndpoint, + IssueArchiveViewSet, IssueCommentViewSet, - IssueSubscriberViewSet, + IssueDraftViewSet, + IssueListEndpoint, IssueReactionViewSet, - CommentReactionViewSet, - IssueUserDisplayPropertyEndpoint, - IssueArchiveViewSet, IssueRelationViewSet, - IssueDraftViewSet, + IssueSubscriberViewSet, + IssueUserDisplayPropertyEndpoint, + IssueViewSet, + LabelViewSet, ) - urlpatterns = [ path( "workspaces//projects//issues/list/", @@ -85,18 +81,7 @@ BulkDeleteIssuesEndpoint.as_view(), name="project-issues-bulk", ), - path( - "workspaces//projects//bulk-import-issues//", - BulkImportIssuesEndpoint.as_view(), - name="project-issues-bulk", - ), - # deprecated endpoint TODO: remove once confirmed - path( - "workspaces//my-issues/", - UserWorkSpaceIssues.as_view(), - name="workspace-issues", - ), - ## + ## path( "workspaces//projects//issues//sub-issues/", SubIssuesEndpoint.as_view(), diff --git a/apiserver/plane/app/urls/module.py b/apiserver/plane/app/urls/module.py index 5e9f4f1230c..a730fcd5054 100644 --- 
a/apiserver/plane/app/urls/module.py +++ b/apiserver/plane/app/urls/module.py @@ -6,8 +6,8 @@ ModuleIssueViewSet, ModuleLinkViewSet, ModuleFavoriteViewSet, - BulkImportModulesEndpoint, ModuleUserPropertiesEndpoint, + ModuleArchiveUnarchiveEndpoint, ) @@ -106,14 +106,19 @@ ), name="user-favorite-module", ), - path( - "workspaces//projects//bulk-import-modules//", - BulkImportModulesEndpoint.as_view(), - name="bulk-modules-create", - ), path( "workspaces//projects//modules//user-properties/", ModuleUserPropertiesEndpoint.as_view(), name="cycle-user-filters", ), + path( + "workspaces//projects//modules//archive/", + ModuleArchiveUnarchiveEndpoint.as_view(), + name="module-archive-unarchive", + ), + path( + "workspaces//projects//archived-modules/", + ModuleArchiveUnarchiveEndpoint.as_view(), + name="module-archive-unarchive", + ), ] diff --git a/apiserver/plane/app/urls/project.py b/apiserver/plane/app/urls/project.py index f8ecac4c068..7ea636df8e9 100644 --- a/apiserver/plane/app/urls/project.py +++ b/apiserver/plane/app/urls/project.py @@ -14,6 +14,7 @@ ProjectPublicCoverImagesEndpoint, ProjectDeployBoardViewSet, UserProjectRolesEndpoint, + ProjectArchiveUnarchiveEndpoint, ) @@ -175,4 +176,9 @@ ), name="project-deploy-board", ), + path( + "workspaces//projects//archive/", + ProjectArchiveUnarchiveEndpoint.as_view(), + name="project-archive-unarchive", + ), ] diff --git a/apiserver/plane/app/urls/workspace.py b/apiserver/plane/app/urls/workspace.py index a70ff18e535..8b21bb9e1b2 100644 --- a/apiserver/plane/app/urls/workspace.py +++ b/apiserver/plane/app/urls/workspace.py @@ -22,6 +22,7 @@ WorkspaceUserPropertiesEndpoint, WorkspaceStatesEndpoint, WorkspaceEstimatesEndpoint, + ExportWorkspaceUserActivityEndpoint, WorkspaceModulesEndpoint, WorkspaceCyclesEndpoint, ) @@ -191,6 +192,11 @@ WorkspaceUserActivityEndpoint.as_view(), name="workspace-user-activity", ), + path( + "workspaces//user-activity//export/", + ExportWorkspaceUserActivityEndpoint.as_view(), + 
name="export-workspace-user-activity", + ), path( "workspaces//user-profile//", WorkspaceUserProfileEndpoint.as_view(), diff --git a/apiserver/plane/app/urls_deprecated.py b/apiserver/plane/app/urls_deprecated.py deleted file mode 100644 index 2a47285aa21..00000000000 --- a/apiserver/plane/app/urls_deprecated.py +++ /dev/null @@ -1,1810 +0,0 @@ -from django.urls import path - -from rest_framework_simplejwt.views import TokenRefreshView - -# Create your urls here. - -from plane.app.views import ( - # Authentication - SignUpEndpoint, - SignInEndpoint, - SignOutEndpoint, - MagicSignInEndpoint, - MagicSignInGenerateEndpoint, - OauthEndpoint, - ## End Authentication - # Auth Extended - ForgotPasswordEndpoint, - VerifyEmailEndpoint, - ResetPasswordEndpoint, - RequestEmailVerificationEndpoint, - ChangePasswordEndpoint, - ## End Auth Extender - # User - UserEndpoint, - UpdateUserOnBoardedEndpoint, - UpdateUserTourCompletedEndpoint, - UserActivityEndpoint, - ## End User - # Workspaces - WorkSpaceViewSet, - UserWorkSpacesEndpoint, - InviteWorkspaceEndpoint, - JoinWorkspaceEndpoint, - WorkSpaceMemberViewSet, - WorkspaceMembersEndpoint, - WorkspaceInvitationsViewset, - UserWorkspaceInvitationsEndpoint, - WorkspaceMemberUserEndpoint, - WorkspaceMemberUserViewsEndpoint, - WorkSpaceAvailabilityCheckEndpoint, - TeamMemberViewSet, - AddTeamToProjectEndpoint, - UserLastProjectWithWorkspaceEndpoint, - UserWorkspaceInvitationEndpoint, - UserActivityGraphEndpoint, - UserIssueCompletedGraphEndpoint, - UserWorkspaceDashboardEndpoint, - WorkspaceThemeViewSet, - WorkspaceUserProfileStatsEndpoint, - WorkspaceUserActivityEndpoint, - WorkspaceUserProfileEndpoint, - WorkspaceUserProfileIssuesEndpoint, - WorkspaceLabelsEndpoint, - LeaveWorkspaceEndpoint, - ## End Workspaces - # File Assets - FileAssetEndpoint, - UserAssetsEndpoint, - ## End File Assets - # Projects - ProjectViewSet, - InviteProjectEndpoint, - ProjectMemberViewSet, - ProjectMemberEndpoint, - ProjectMemberInvitationsViewset, - 
ProjectMemberUserEndpoint, - AddMemberToProjectEndpoint, - ProjectJoinEndpoint, - UserProjectInvitationsViewset, - ProjectIdentifierEndpoint, - ProjectFavoritesViewSet, - LeaveProjectEndpoint, - ProjectPublicCoverImagesEndpoint, - ## End Projects - # Issues - IssueViewSet, - WorkSpaceIssuesEndpoint, - IssueActivityEndpoint, - IssueCommentViewSet, - UserWorkSpaceIssues, - BulkDeleteIssuesEndpoint, - BulkImportIssuesEndpoint, - ProjectUserViewsEndpoint, - IssueUserDisplayPropertyEndpoint, - LabelViewSet, - SubIssuesEndpoint, - IssueLinkViewSet, - BulkCreateIssueLabelsEndpoint, - IssueAttachmentEndpoint, - IssueArchiveViewSet, - IssueSubscriberViewSet, - IssueCommentPublicViewSet, - IssueReactionViewSet, - IssueRelationViewSet, - CommentReactionViewSet, - IssueDraftViewSet, - ## End Issues - # States - StateViewSet, - ## End States - # Estimates - ProjectEstimatePointEndpoint, - BulkEstimatePointEndpoint, - ## End Estimates - # Views - GlobalViewViewSet, - GlobalViewIssuesViewSet, - IssueViewViewSet, - IssueViewFavoriteViewSet, - ## End Views - # Cycles - CycleViewSet, - CycleIssueViewSet, - CycleDateCheckEndpoint, - CycleFavoriteViewSet, - TransferCycleIssueEndpoint, - ## End Cycles - # Modules - ModuleViewSet, - ModuleIssueViewSet, - ModuleFavoriteViewSet, - ModuleLinkViewSet, - BulkImportModulesEndpoint, - ## End Modules - # Pages - PageViewSet, - PageLogEndpoint, - SubPagesEndpoint, - PageFavoriteViewSet, - CreateIssueFromBlockEndpoint, - ## End Pages - # Api Tokens - ApiTokenEndpoint, - ## End Api Tokens - # Integrations - IntegrationViewSet, - WorkspaceIntegrationViewSet, - GithubRepositoriesEndpoint, - GithubRepositorySyncViewSet, - GithubIssueSyncViewSet, - GithubCommentSyncViewSet, - BulkCreateGithubIssueSyncEndpoint, - SlackProjectSyncViewSet, - ## End Integrations - # Importer - ServiceIssueImportSummaryEndpoint, - ImportServiceEndpoint, - UpdateServiceImportStatusEndpoint, - ## End importer - # Search - GlobalSearchEndpoint, - IssueSearchEndpoint, - ## End 
Search - # External - GPTIntegrationEndpoint, - ReleaseNotesEndpoint, - UnsplashEndpoint, - ## End External - # Inbox - InboxViewSet, - InboxIssueViewSet, - ## End Inbox - # Analytics - AnalyticsEndpoint, - AnalyticViewViewset, - SavedAnalyticEndpoint, - ExportAnalyticsEndpoint, - DefaultAnalyticsEndpoint, - ## End Analytics - # Notification - NotificationViewSet, - UnreadNotificationEndpoint, - MarkAllReadNotificationViewSet, - ## End Notification - # Public Boards - ProjectDeployBoardViewSet, - ProjectIssuesPublicEndpoint, - ProjectDeployBoardPublicSettingsEndpoint, - IssueReactionPublicViewSet, - CommentReactionPublicViewSet, - InboxIssuePublicViewSet, - IssueVotePublicViewSet, - WorkspaceProjectDeployBoardEndpoint, - IssueRetrievePublicEndpoint, - ## End Public Boards - ## Exporter - ExportIssuesEndpoint, - ## End Exporter - # Configuration - ConfigurationEndpoint, - ## End Configuration -) - - -# TODO: Delete this file -# This url file has been deprecated use apiserver/plane/urls folder to create new urls - -urlpatterns = [ - # Social Auth - path("social-auth/", OauthEndpoint.as_view(), name="oauth"), - # Auth - path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"), - path("sign-in/", SignInEndpoint.as_view(), name="sign-in"), - path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"), - # Magic Sign In/Up - path( - "magic-generate/", - MagicSignInGenerateEndpoint.as_view(), - name="magic-generate", - ), - path( - "magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in" - ), - path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"), - # Email verification - path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"), - path( - "request-email-verify/", - RequestEmailVerificationEndpoint.as_view(), - name="request-reset-email", - ), - # Password Manipulation - path( - "reset-password///", - ResetPasswordEndpoint.as_view(), - name="password-reset", - ), - path( - "forgot-password/", - 
ForgotPasswordEndpoint.as_view(), - name="forgot-password", - ), - # User Profile - path( - "users/me/", - UserEndpoint.as_view( - {"get": "retrieve", "patch": "partial_update", "delete": "destroy"} - ), - name="users", - ), - path( - "users/me/settings/", - UserEndpoint.as_view( - { - "get": "retrieve_user_settings", - } - ), - name="users", - ), - path( - "users/me/change-password/", - ChangePasswordEndpoint.as_view(), - name="change-password", - ), - path( - "users/me/onboard/", - UpdateUserOnBoardedEndpoint.as_view(), - name="user-onboard", - ), - path( - "users/me/tour-completed/", - UpdateUserTourCompletedEndpoint.as_view(), - name="user-tour", - ), - path( - "users/workspaces//activities/", - UserActivityEndpoint.as_view(), - name="user-activities", - ), - # user workspaces - path( - "users/me/workspaces/", - UserWorkSpacesEndpoint.as_view(), - name="user-workspace", - ), - # user workspace invitations - path( - "users/me/invitations/workspaces/", - UserWorkspaceInvitationsEndpoint.as_view( - {"get": "list", "post": "create"} - ), - name="user-workspace-invitations", - ), - # user workspace invitation - path( - "users/me/invitations//", - UserWorkspaceInvitationEndpoint.as_view( - { - "get": "retrieve", - } - ), - name="workspace", - ), - # user join workspace - # User Graphs - path( - "users/me/workspaces//activity-graph/", - UserActivityGraphEndpoint.as_view(), - name="user-activity-graph", - ), - path( - "users/me/workspaces//issues-completed-graph/", - UserIssueCompletedGraphEndpoint.as_view(), - name="completed-graph", - ), - path( - "users/me/workspaces//dashboard/", - UserWorkspaceDashboardEndpoint.as_view(), - name="user-workspace-dashboard", - ), - ## User Graph - path( - "users/me/invitations/workspaces///join/", - JoinWorkspaceEndpoint.as_view(), - name="user-join-workspace", - ), - # user project invitations - path( - "users/me/invitations/projects/", - UserProjectInvitationsViewset.as_view( - {"get": "list", "post": "create"} - ), - 
name="user-project-invitaions", - ), - ## Workspaces ## - path( - "workspace-slug-check/", - WorkSpaceAvailabilityCheckEndpoint.as_view(), - name="workspace-availability", - ), - path( - "workspaces/", - WorkSpaceViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="workspace", - ), - path( - "workspaces//", - WorkSpaceViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="workspace", - ), - path( - "workspaces//invite/", - InviteWorkspaceEndpoint.as_view(), - name="workspace", - ), - path( - "workspaces//invitations/", - WorkspaceInvitationsViewset.as_view({"get": "list"}), - name="workspace", - ), - path( - "workspaces//invitations//", - WorkspaceInvitationsViewset.as_view( - { - "delete": "destroy", - "get": "retrieve", - } - ), - name="workspace", - ), - path( - "workspaces//members/", - WorkSpaceMemberViewSet.as_view({"get": "list"}), - name="workspace", - ), - path( - "workspaces//members//", - WorkSpaceMemberViewSet.as_view( - { - "patch": "partial_update", - "delete": "destroy", - "get": "retrieve", - } - ), - name="workspace", - ), - path( - "workspaces//workspace-members/", - WorkspaceMembersEndpoint.as_view(), - name="workspace-members", - ), - path( - "workspaces//teams/", - TeamMemberViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="workspace", - ), - path( - "workspaces//teams//", - TeamMemberViewSet.as_view( - { - "put": "update", - "patch": "partial_update", - "delete": "destroy", - "get": "retrieve", - } - ), - name="workspace", - ), - path( - "users/last-visited-workspace/", - UserLastProjectWithWorkspaceEndpoint.as_view(), - name="workspace-project-details", - ), - path( - "workspaces//workspace-members/me/", - WorkspaceMemberUserEndpoint.as_view(), - name="workspace-member-details", - ), - path( - "workspaces//workspace-views/", - WorkspaceMemberUserViewsEndpoint.as_view(), - name="workspace-member-details", - ), - path( - 
"workspaces//workspace-themes/", - WorkspaceThemeViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="workspace-themes", - ), - path( - "workspaces//workspace-themes//", - WorkspaceThemeViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="workspace-themes", - ), - path( - "workspaces//user-stats//", - WorkspaceUserProfileStatsEndpoint.as_view(), - name="workspace-user-stats", - ), - path( - "workspaces//user-activity//", - WorkspaceUserActivityEndpoint.as_view(), - name="workspace-user-activity", - ), - path( - "workspaces//user-profile//", - WorkspaceUserProfileEndpoint.as_view(), - name="workspace-user-profile-page", - ), - path( - "workspaces//user-issues//", - WorkspaceUserProfileIssuesEndpoint.as_view(), - name="workspace-user-profile-issues", - ), - path( - "workspaces//labels/", - WorkspaceLabelsEndpoint.as_view(), - name="workspace-labels", - ), - path( - "workspaces//members/leave/", - LeaveWorkspaceEndpoint.as_view(), - name="workspace-labels", - ), - ## End Workspaces ## - # Projects - path( - "workspaces//projects/", - ProjectViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project", - ), - path( - "workspaces//projects//", - ProjectViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project", - ), - path( - "workspaces//project-identifiers/", - ProjectIdentifierEndpoint.as_view(), - name="project-identifiers", - ), - path( - "workspaces//projects//invite/", - InviteProjectEndpoint.as_view(), - name="project", - ), - path( - "workspaces//projects//members/", - ProjectMemberViewSet.as_view({"get": "list"}), - name="project", - ), - path( - "workspaces//projects//members//", - ProjectMemberViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project", - ), - path( - "workspaces//projects//project-members/", - 
ProjectMemberEndpoint.as_view(), - name="project", - ), - path( - "workspaces//projects//members/add/", - AddMemberToProjectEndpoint.as_view(), - name="project", - ), - path( - "workspaces//projects/join/", - ProjectJoinEndpoint.as_view(), - name="project", - ), - path( - "workspaces//projects//team-invite/", - AddTeamToProjectEndpoint.as_view(), - name="projects", - ), - path( - "workspaces//projects//invitations/", - ProjectMemberInvitationsViewset.as_view({"get": "list"}), - name="workspace", - ), - path( - "workspaces//projects//invitations//", - ProjectMemberInvitationsViewset.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - name="project", - ), - path( - "workspaces//projects//project-views/", - ProjectUserViewsEndpoint.as_view(), - name="project-view", - ), - path( - "workspaces//projects//project-members/me/", - ProjectMemberUserEndpoint.as_view(), - name="project-view", - ), - path( - "workspaces//user-favorite-projects/", - ProjectFavoritesViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project", - ), - path( - "workspaces//user-favorite-projects//", - ProjectFavoritesViewSet.as_view( - { - "delete": "destroy", - } - ), - name="project", - ), - path( - "workspaces//projects//members/leave/", - LeaveProjectEndpoint.as_view(), - name="project", - ), - path( - "project-covers/", - ProjectPublicCoverImagesEndpoint.as_view(), - name="project-covers", - ), - # End Projects - # States - path( - "workspaces//projects//states/", - StateViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-states", - ), - path( - "workspaces//projects//states//", - StateViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-state", - ), - # End States ## - # Estimates - path( - "workspaces//projects//project-estimates/", - ProjectEstimatePointEndpoint.as_view(), - name="project-estimate-points", - ), - path( - 
"workspaces//projects//estimates/", - BulkEstimatePointEndpoint.as_view( - { - "get": "list", - "post": "create", - } - ), - name="bulk-create-estimate-points", - ), - path( - "workspaces//projects//estimates//", - BulkEstimatePointEndpoint.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="bulk-create-estimate-points", - ), - # End Estimates ## - # Views - path( - "workspaces//projects//views/", - IssueViewViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-view", - ), - path( - "workspaces//projects//views//", - IssueViewViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-view", - ), - path( - "workspaces//views/", - GlobalViewViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="global-view", - ), - path( - "workspaces//views//", - GlobalViewViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="global-view", - ), - path( - "workspaces//issues/", - GlobalViewIssuesViewSet.as_view( - { - "get": "list", - } - ), - name="global-view-issues", - ), - path( - "workspaces//projects//user-favorite-views/", - IssueViewFavoriteViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="user-favorite-view", - ), - path( - "workspaces//projects//user-favorite-views//", - IssueViewFavoriteViewSet.as_view( - { - "delete": "destroy", - } - ), - name="user-favorite-view", - ), - ## End Views - ## Cycles - path( - "workspaces//projects//cycles/", - CycleViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-cycle", - ), - path( - "workspaces//projects//cycles//", - CycleViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-cycle", - ), - path( - "workspaces//projects//cycles//cycle-issues/", - 
CycleIssueViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-cycle", - ), - path( - "workspaces//projects//cycles//cycle-issues//", - CycleIssueViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-cycle", - ), - path( - "workspaces//projects//cycles/date-check/", - CycleDateCheckEndpoint.as_view(), - name="project-cycle", - ), - path( - "workspaces//projects//user-favorite-cycles/", - CycleFavoriteViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="user-favorite-cycle", - ), - path( - "workspaces//projects//user-favorite-cycles//", - CycleFavoriteViewSet.as_view( - { - "delete": "destroy", - } - ), - name="user-favorite-cycle", - ), - path( - "workspaces//projects//cycles//transfer-issues/", - TransferCycleIssueEndpoint.as_view(), - name="transfer-issues", - ), - ## End Cycles - # Issue - path( - "workspaces//projects//issues/", - IssueViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue", - ), - path( - "workspaces//projects//issues//", - IssueViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue", - ), - path( - "workspaces//projects//issue-labels/", - LabelViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-labels", - ), - path( - "workspaces//projects//issue-labels//", - LabelViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue-labels", - ), - path( - "workspaces//projects//bulk-create-labels/", - BulkCreateIssueLabelsEndpoint.as_view(), - name="project-bulk-labels", - ), - path( - "workspaces//projects//bulk-delete-issues/", - BulkDeleteIssuesEndpoint.as_view(), - name="project-issues-bulk", - ), - path( - "workspaces//projects//bulk-import-issues//", - 
BulkImportIssuesEndpoint.as_view(), - name="project-issues-bulk", - ), - path( - "workspaces//my-issues/", - UserWorkSpaceIssues.as_view(), - name="workspace-issues", - ), - path( - "workspaces//projects//issues//sub-issues/", - SubIssuesEndpoint.as_view(), - name="sub-issues", - ), - path( - "workspaces//projects//issues//issue-links/", - IssueLinkViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-links", - ), - path( - "workspaces//projects//issues//issue-links//", - IssueLinkViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue-links", - ), - path( - "workspaces//projects//issues//issue-attachments/", - IssueAttachmentEndpoint.as_view(), - name="project-issue-attachments", - ), - path( - "workspaces//projects//issues//issue-attachments//", - IssueAttachmentEndpoint.as_view(), - name="project-issue-attachments", - ), - path( - "workspaces//export-issues/", - ExportIssuesEndpoint.as_view(), - name="export-issues", - ), - ## End Issues - ## Issue Activity - path( - "workspaces//projects//issues//history/", - IssueActivityEndpoint.as_view(), - name="project-issue-history", - ), - ## Issue Activity - ## IssueComments - path( - "workspaces//projects//issues//comments/", - IssueCommentViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-comment", - ), - path( - "workspaces//projects//issues//comments//", - IssueCommentViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue-comment", - ), - ## End IssueComments - # Issue Subscribers - path( - "workspaces//projects//issues//issue-subscribers/", - IssueSubscriberViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-subscribers", - ), - path( - "workspaces//projects//issues//issue-subscribers//", - IssueSubscriberViewSet.as_view({"delete": 
"destroy"}), - name="project-issue-subscribers", - ), - path( - "workspaces//projects//issues//subscribe/", - IssueSubscriberViewSet.as_view( - { - "get": "subscription_status", - "post": "subscribe", - "delete": "unsubscribe", - } - ), - name="project-issue-subscribers", - ), - ## End Issue Subscribers - # Issue Reactions - path( - "workspaces//projects//issues//reactions/", - IssueReactionViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-reactions", - ), - path( - "workspaces//projects//issues//reactions//", - IssueReactionViewSet.as_view( - { - "delete": "destroy", - } - ), - name="project-issue-reactions", - ), - ## End Issue Reactions - # Comment Reactions - path( - "workspaces//projects//comments//reactions/", - CommentReactionViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-comment-reactions", - ), - path( - "workspaces//projects//comments//reactions//", - CommentReactionViewSet.as_view( - { - "delete": "destroy", - } - ), - name="project-issue-comment-reactions", - ), - ## End Comment Reactions - ## IssueProperty - path( - "workspaces//projects//issue-display-properties/", - IssueUserDisplayPropertyEndpoint.as_view(), - name="project-issue-display-properties", - ), - ## IssueProperty Ebd - ## Issue Archives - path( - "workspaces//projects//archived-issues/", - IssueArchiveViewSet.as_view( - { - "get": "list", - } - ), - name="project-issue-archive", - ), - path( - "workspaces//projects//archived-issues//", - IssueArchiveViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - name="project-issue-archive", - ), - path( - "workspaces//projects//unarchive//", - IssueArchiveViewSet.as_view( - { - "post": "unarchive", - } - ), - name="project-issue-archive", - ), - ## End Issue Archives - ## Issue Relation - path( - "workspaces//projects//issues//issue-relation/", - IssueRelationViewSet.as_view( - { - "post": "create", - } - ), - name="issue-relation", - ), - path( 
- "workspaces//projects//issues//issue-relation//", - IssueRelationViewSet.as_view( - { - "delete": "destroy", - } - ), - name="issue-relation", - ), - ## End Issue Relation - ## Issue Drafts - path( - "workspaces//projects//issue-drafts/", - IssueDraftViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-draft", - ), - path( - "workspaces//projects//issue-drafts//", - IssueDraftViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue-draft", - ), - ## End Issue Drafts - ## File Assets - path( - "workspaces//file-assets/", - FileAssetEndpoint.as_view(), - name="file-assets", - ), - path( - "workspaces/file-assets///", - FileAssetEndpoint.as_view(), - name="file-assets", - ), - path( - "users/file-assets/", - UserAssetsEndpoint.as_view(), - name="user-file-assets", - ), - path( - "users/file-assets//", - UserAssetsEndpoint.as_view(), - name="user-file-assets", - ), - ## End File Assets - ## Modules - path( - "workspaces//projects//modules/", - ModuleViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-modules", - ), - path( - "workspaces//projects//modules//", - ModuleViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-modules", - ), - path( - "workspaces//projects//modules//module-issues/", - ModuleIssueViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-module-issues", - ), - path( - "workspaces//projects//modules//module-issues//", - ModuleIssueViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-module-issues", - ), - path( - "workspaces//projects//modules//module-links/", - ModuleLinkViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-issue-module-links", - ), - path( - 
"workspaces//projects//modules//module-links//", - ModuleLinkViewSet.as_view( - { - "get": "retrieve", - "put": "update", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-issue-module-links", - ), - path( - "workspaces//projects//user-favorite-modules/", - ModuleFavoriteViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="user-favorite-module", - ), - path( - "workspaces//projects//user-favorite-modules//", - ModuleFavoriteViewSet.as_view( - { - "delete": "destroy", - } - ), - name="user-favorite-module", - ), - path( - "workspaces//projects//bulk-import-modules//", - BulkImportModulesEndpoint.as_view(), - name="bulk-modules-create", - ), - ## End Modules - # Pages - path( - "workspaces//projects//pages/", - PageViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-pages", - ), - path( - "workspaces//projects//pages//", - PageViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-pages", - ), - path( - "workspaces//projects//pages//archive/", - PageViewSet.as_view( - { - "post": "archive", - } - ), - name="project-page-archive", - ), - path( - "workspaces//projects//pages//unarchive/", - PageViewSet.as_view( - { - "post": "unarchive", - } - ), - name="project-page-unarchive", - ), - path( - "workspaces//projects//archived-pages/", - PageViewSet.as_view( - { - "get": "archive_list", - } - ), - name="project-pages", - ), - path( - "workspaces//projects//pages//lock/", - PageViewSet.as_view( - { - "post": "lock", - } - ), - name="project-pages", - ), - path( - "workspaces//projects//pages//unlock/", - PageViewSet.as_view( - { - "post": "unlock", - } - ), - ), - path( - "workspaces//projects//pages//transactions/", - PageLogEndpoint.as_view(), - name="page-transactions", - ), - path( - "workspaces//projects//pages//transactions//", - PageLogEndpoint.as_view(), - name="page-transactions", - ), - path( - 
"workspaces//projects//pages//sub-pages/", - SubPagesEndpoint.as_view(), - name="sub-page", - ), - path( - "workspaces//projects//estimates/", - BulkEstimatePointEndpoint.as_view( - { - "get": "list", - "post": "create", - } - ), - name="bulk-create-estimate-points", - ), - path( - "workspaces//projects//estimates//", - BulkEstimatePointEndpoint.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="bulk-create-estimate-points", - ), - path( - "workspaces//projects//user-favorite-pages/", - PageFavoriteViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="user-favorite-pages", - ), - path( - "workspaces//projects//user-favorite-pages//", - PageFavoriteViewSet.as_view( - { - "delete": "destroy", - } - ), - name="user-favorite-pages", - ), - path( - "workspaces//projects//pages//page-blocks//issues/", - CreateIssueFromBlockEndpoint.as_view(), - name="page-block-issues", - ), - ## End Pages - # API Tokens - path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"), - path( - "api-tokens//", ApiTokenEndpoint.as_view(), name="api-tokens" - ), - ## End API Tokens - # Integrations - path( - "integrations/", - IntegrationViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="integrations", - ), - path( - "integrations//", - IntegrationViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="integrations", - ), - path( - "workspaces//workspace-integrations/", - WorkspaceIntegrationViewSet.as_view( - { - "get": "list", - } - ), - name="workspace-integrations", - ), - path( - "workspaces//workspace-integrations//", - WorkspaceIntegrationViewSet.as_view( - { - "post": "create", - } - ), - name="workspace-integrations", - ), - path( - "workspaces//workspace-integrations//provider/", - WorkspaceIntegrationViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - name="workspace-integrations", - ), - # Github 
Integrations - path( - "workspaces//workspace-integrations//github-repositories/", - GithubRepositoriesEndpoint.as_view(), - ), - path( - "workspaces//projects//workspace-integrations//github-repository-sync/", - GithubRepositorySyncViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - ), - path( - "workspaces//projects//workspace-integrations//github-repository-sync//", - GithubRepositorySyncViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync/", - GithubIssueSyncViewSet.as_view( - { - "post": "create", - "get": "list", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/", - BulkCreateGithubIssueSyncEndpoint.as_view(), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync//", - GithubIssueSyncViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/", - GithubCommentSyncViewSet.as_view( - { - "post": "create", - "get": "list", - } - ), - ), - path( - "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//", - GithubCommentSyncViewSet.as_view( - { - "get": "retrieve", - "delete": "destroy", - } - ), - ), - ## End Github Integrations - # Slack Integration - path( - "workspaces//projects//workspace-integrations//project-slack-sync/", - SlackProjectSyncViewSet.as_view( - { - "post": "create", - "get": "list", - } - ), - ), - path( - "workspaces//projects//workspace-integrations//project-slack-sync//", - SlackProjectSyncViewSet.as_view( - { - "delete": "destroy", - "get": "retrieve", - } - ), - ), - ## End Slack Integration - ## End Integrations - # Importer - path( - "workspaces//importers//", - ServiceIssueImportSummaryEndpoint.as_view(), - name="importer", - ), - path( - "workspaces//projects/importers//", - 
ImportServiceEndpoint.as_view(), - name="importer", - ), - path( - "workspaces//importers/", - ImportServiceEndpoint.as_view(), - name="importer", - ), - path( - "workspaces//importers///", - ImportServiceEndpoint.as_view(), - name="importer", - ), - path( - "workspaces//projects//service//importers//", - UpdateServiceImportStatusEndpoint.as_view(), - name="importer", - ), - ## End Importer - # Search - path( - "workspaces//search/", - GlobalSearchEndpoint.as_view(), - name="global-search", - ), - path( - "workspaces//projects//search-issues/", - IssueSearchEndpoint.as_view(), - name="project-issue-search", - ), - ## End Search - # External - path( - "workspaces//projects//ai-assistant/", - GPTIntegrationEndpoint.as_view(), - name="importer", - ), - path( - "release-notes/", - ReleaseNotesEndpoint.as_view(), - name="release-notes", - ), - path( - "unsplash/", - UnsplashEndpoint.as_view(), - name="release-notes", - ), - ## End External - # Inbox - path( - "workspaces//projects//inboxes/", - InboxViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="inbox", - ), - path( - "workspaces//projects//inboxes//", - InboxViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="inbox", - ), - path( - "workspaces//projects//inboxes//inbox-issues/", - InboxIssueViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="inbox-issue", - ), - path( - "workspaces//projects//inboxes//inbox-issues//", - InboxIssueViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="inbox-issue", - ), - ## End Inbox - # Analytics - path( - "workspaces//analytics/", - AnalyticsEndpoint.as_view(), - name="plane-analytics", - ), - path( - "workspaces//analytic-view/", - AnalyticViewViewset.as_view({"get": "list", "post": "create"}), - name="analytic-view", - ), - path( - "workspaces//analytic-view//", - AnalyticViewViewset.as_view( - {"get": "retrieve", 
"patch": "partial_update", "delete": "destroy"} - ), - name="analytic-view", - ), - path( - "workspaces//saved-analytic-view//", - SavedAnalyticEndpoint.as_view(), - name="saved-analytic-view", - ), - path( - "workspaces//export-analytics/", - ExportAnalyticsEndpoint.as_view(), - name="export-analytics", - ), - path( - "workspaces//default-analytics/", - DefaultAnalyticsEndpoint.as_view(), - name="default-analytics", - ), - ## End Analytics - # Notification - path( - "workspaces//users/notifications/", - NotificationViewSet.as_view( - { - "get": "list", - } - ), - name="notifications", - ), - path( - "workspaces//users/notifications//", - NotificationViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="notifications", - ), - path( - "workspaces//users/notifications//read/", - NotificationViewSet.as_view( - { - "post": "mark_read", - "delete": "mark_unread", - } - ), - name="notifications", - ), - path( - "workspaces//users/notifications//archive/", - NotificationViewSet.as_view( - { - "post": "archive", - "delete": "unarchive", - } - ), - name="notifications", - ), - path( - "workspaces//users/notifications/unread/", - UnreadNotificationEndpoint.as_view(), - name="unread-notifications", - ), - path( - "workspaces//users/notifications/mark-all-read/", - MarkAllReadNotificationViewSet.as_view( - { - "post": "create", - } - ), - name="mark-all-read-notifications", - ), - ## End Notification - # Public Boards - path( - "workspaces//projects//project-deploy-boards/", - ProjectDeployBoardViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="project-deploy-board", - ), - path( - "workspaces//projects//project-deploy-boards//", - ProjectDeployBoardViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="project-deploy-board", - ), - path( - "public/workspaces//project-boards//settings/", - ProjectDeployBoardPublicSettingsEndpoint.as_view(), - 
name="project-deploy-board-settings", - ), - path( - "public/workspaces//project-boards//issues/", - ProjectIssuesPublicEndpoint.as_view(), - name="project-deploy-board", - ), - path( - "public/workspaces//project-boards//issues//", - IssueRetrievePublicEndpoint.as_view(), - name="workspace-project-boards", - ), - path( - "public/workspaces//project-boards//issues//comments/", - IssueCommentPublicViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="issue-comments-project-board", - ), - path( - "public/workspaces//project-boards//issues//comments//", - IssueCommentPublicViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="issue-comments-project-board", - ), - path( - "public/workspaces//project-boards//issues//reactions/", - IssueReactionPublicViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="issue-reactions-project-board", - ), - path( - "public/workspaces//project-boards//issues//reactions//", - IssueReactionPublicViewSet.as_view( - { - "delete": "destroy", - } - ), - name="issue-reactions-project-board", - ), - path( - "public/workspaces//project-boards//comments//reactions/", - CommentReactionPublicViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="comment-reactions-project-board", - ), - path( - "public/workspaces//project-boards//comments//reactions//", - CommentReactionPublicViewSet.as_view( - { - "delete": "destroy", - } - ), - name="comment-reactions-project-board", - ), - path( - "public/workspaces//project-boards//inboxes//inbox-issues/", - InboxIssuePublicViewSet.as_view( - { - "get": "list", - "post": "create", - } - ), - name="inbox-issue", - ), - path( - "public/workspaces//project-boards//inboxes//inbox-issues//", - InboxIssuePublicViewSet.as_view( - { - "get": "retrieve", - "patch": "partial_update", - "delete": "destroy", - } - ), - name="inbox-issue", - ), - path( - "public/workspaces//project-boards//issues//votes/", - 
IssueVotePublicViewSet.as_view( - { - "get": "list", - "post": "create", - "delete": "destroy", - } - ), - name="issue-vote-project-board", - ), - path( - "public/workspaces//project-boards/", - WorkspaceProjectDeployBoardEndpoint.as_view(), - name="workspace-project-boards", - ), - ## End Public Boards - # Configuration - path( - "configs/", - ConfigurationEndpoint.as_view(), - name="configuration", - ), - ## End Configuration -] diff --git a/apiserver/plane/app/views/__init__.py b/apiserver/plane/app/views/__init__.py index fb47b06dc2e..90633729a07 100644 --- a/apiserver/plane/app/views/__init__.py +++ b/apiserver/plane/app/views/__init__.py @@ -1,19 +1,27 @@ -from .project import ( +from .project.base import ( ProjectViewSet, - ProjectMemberViewSet, - UserProjectInvitationsViewset, - ProjectInvitationsViewset, - AddTeamToProjectEndpoint, ProjectIdentifierEndpoint, - ProjectJoinEndpoint, ProjectUserViewsEndpoint, - ProjectMemberUserEndpoint, ProjectFavoritesViewSet, ProjectPublicCoverImagesEndpoint, ProjectDeployBoardViewSet, + ProjectArchiveUnarchiveEndpoint, +) + +from .project.invite import ( + UserProjectInvitationsViewset, + ProjectInvitationsViewset, + ProjectJoinEndpoint, +) + +from .project.member import ( + ProjectMemberViewSet, + AddTeamToProjectEndpoint, + ProjectMemberUserEndpoint, UserProjectRolesEndpoint, ) -from .user import ( + +from .user.base import ( UserEndpoint, UpdateUserOnBoardedEndpoint, UpdateUserTourCompletedEndpoint, @@ -26,70 +34,122 @@ from .base import BaseAPIView, BaseViewSet, WebhookMixin -from .workspace import ( +from .workspace.base import ( WorkSpaceViewSet, UserWorkSpacesEndpoint, WorkSpaceAvailabilityCheckEndpoint, - WorkspaceJoinEndpoint, + UserWorkspaceDashboardEndpoint, + WorkspaceThemeViewSet, + ExportWorkspaceUserActivityEndpoint +) + +from .workspace.member import ( WorkSpaceMemberViewSet, TeamMemberViewSet, + WorkspaceMemberUserEndpoint, + WorkspaceProjectMemberEndpoint, + WorkspaceMemberUserViewsEndpoint, +) +from 
.workspace.invite import ( WorkspaceInvitationsViewset, + WorkspaceJoinEndpoint, UserWorkspaceInvitationsViewSet, +) +from .workspace.label import ( + WorkspaceLabelsEndpoint, +) +from .workspace.state import ( + WorkspaceStatesEndpoint, +) +from .workspace.user import ( UserLastProjectWithWorkspaceEndpoint, - WorkspaceMemberUserEndpoint, - WorkspaceMemberUserViewsEndpoint, - UserActivityGraphEndpoint, - UserIssueCompletedGraphEndpoint, - UserWorkspaceDashboardEndpoint, - WorkspaceThemeViewSet, - WorkspaceUserProfileStatsEndpoint, - WorkspaceUserActivityEndpoint, - WorkspaceUserProfileEndpoint, WorkspaceUserProfileIssuesEndpoint, - WorkspaceLabelsEndpoint, - WorkspaceProjectMemberEndpoint, WorkspaceUserPropertiesEndpoint, - WorkspaceStatesEndpoint, + WorkspaceUserProfileEndpoint, + WorkspaceUserActivityEndpoint, + WorkspaceUserProfileStatsEndpoint, + UserActivityGraphEndpoint, + UserIssueCompletedGraphEndpoint, +) +from .workspace.estimate import ( WorkspaceEstimatesEndpoint, +) +from .workspace.module import ( WorkspaceModulesEndpoint, +) +from .workspace.cycle import ( WorkspaceCyclesEndpoint, ) -from .state import StateViewSet -from .view import ( + +from .state.base import StateViewSet +from .view.base import ( GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, IssueViewFavoriteViewSet, ) -from .cycle import ( +from .cycle.base import ( CycleViewSet, - CycleIssueViewSet, CycleDateCheckEndpoint, CycleFavoriteViewSet, TransferCycleIssueEndpoint, + CycleArchiveUnarchiveEndpoint, CycleUserPropertiesEndpoint, ) -from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet -from .issue import ( +from .cycle.issue import ( + CycleIssueViewSet, +) + +from .asset.base import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet +from .issue.base import ( IssueListEndpoint, IssueViewSet, - WorkSpaceIssuesEndpoint, - IssueActivityEndpoint, - IssueCommentViewSet, IssueUserDisplayPropertyEndpoint, - LabelViewSet, BulkDeleteIssuesEndpoint, - 
UserWorkSpaceIssues, - SubIssuesEndpoint, - IssueLinkViewSet, - BulkCreateIssueLabelsEndpoint, - IssueAttachmentEndpoint, +) + +from .issue.activity import ( + IssueActivityEndpoint, +) + +from .issue.archive import ( IssueArchiveViewSet, - IssueSubscriberViewSet, +) + +from .issue.attachment import ( + IssueAttachmentEndpoint, +) + +from .issue.comment import ( + IssueCommentViewSet, CommentReactionViewSet, - IssueReactionViewSet, +) + +from .issue.draft import IssueDraftViewSet + +from .issue.label import ( + LabelViewSet, + BulkCreateIssueLabelsEndpoint, +) + +from .issue.link import ( + IssueLinkViewSet, +) + +from .issue.relation import ( IssueRelationViewSet, - IssueDraftViewSet, +) + +from .issue.reaction import ( + IssueReactionViewSet, +) + +from .issue.sub_issue import ( + SubIssuesEndpoint, +) + +from .issue.subscriber import ( + IssueSubscriberViewSet, ) from .auth_extended import ( @@ -108,36 +168,22 @@ MagicSignInEndpoint, ) -from .module import ( +from .module.base import ( ModuleViewSet, - ModuleIssueViewSet, ModuleLinkViewSet, ModuleFavoriteViewSet, + ModuleArchiveUnarchiveEndpoint, ModuleUserPropertiesEndpoint, ) -from .api import ApiTokenEndpoint - -from .integration import ( - WorkspaceIntegrationViewSet, - IntegrationViewSet, - GithubIssueSyncViewSet, - GithubRepositorySyncViewSet, - GithubCommentSyncViewSet, - GithubRepositoriesEndpoint, - BulkCreateGithubIssueSyncEndpoint, - SlackProjectSyncViewSet, +from .module.issue import ( + ModuleIssueViewSet, ) -from .importer import ( - ServiceIssueImportSummaryEndpoint, - ImportServiceEndpoint, - UpdateServiceImportStatusEndpoint, - BulkImportIssuesEndpoint, - BulkImportModulesEndpoint, -) +from .api import ApiTokenEndpoint + -from .page import ( +from .page.base import ( PageViewSet, PageFavoriteViewSet, PageLogEndpoint, @@ -147,20 +193,19 @@ from .search import GlobalSearchEndpoint, IssueSearchEndpoint -from .external import ( +from .external.base import ( GPTIntegrationEndpoint, - 
ReleaseNotesEndpoint, UnsplashEndpoint, ) -from .estimate import ( +from .estimate.base import ( ProjectEstimatePointEndpoint, BulkEstimatePointEndpoint, ) -from .inbox import InboxViewSet, InboxIssueViewSet +from .inbox.base import InboxViewSet, InboxIssueViewSet -from .analytic import ( +from .analytic.base import ( AnalyticsEndpoint, AnalyticViewViewset, SavedAnalyticEndpoint, @@ -168,24 +213,23 @@ DefaultAnalyticsEndpoint, ) -from .notification import ( +from .notification.base import ( NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet, UserNotificationPreferenceEndpoint, ) -from .exporter import ExportIssuesEndpoint +from .exporter.base import ExportIssuesEndpoint from .config import ConfigurationEndpoint, MobileConfigurationEndpoint -from .webhook import ( +from .webhook.base import ( WebhookEndpoint, WebhookLogsEndpoint, WebhookSecretRegenerateEndpoint, ) -from .dashboard import ( - DashboardEndpoint, - WidgetsEndpoint -) \ No newline at end of file +from .dashboard.base import DashboardEndpoint, WidgetsEndpoint + +from .error_404 import custom_404_view diff --git a/apiserver/plane/app/views/analytic.py b/apiserver/plane/app/views/analytic/base.py similarity index 97% rename from apiserver/plane/app/views/analytic.py rename to apiserver/plane/app/views/analytic/base.py index 6eb914b236e..8e0d3220da9 100644 --- a/apiserver/plane/app/views/analytic.py +++ b/apiserver/plane/app/views/analytic/base.py @@ -1,5 +1,5 @@ # Django imports -from django.db.models import Count, Sum, F, Q +from django.db.models import Count, Sum, F from django.db.models.functions import ExtractMonth from django.utils import timezone @@ -10,7 +10,7 @@ # Module imports from plane.app.views import BaseAPIView, BaseViewSet from plane.app.permissions import WorkSpaceAdminPermission -from plane.db.models import Issue, AnalyticView, Workspace, State, Label +from plane.db.models import Issue, AnalyticView, Workspace from plane.app.serializers import 
AnalyticViewSerializer from plane.utils.analytics_plot import build_graph_plot from plane.bgtasks.analytic_plot_export import analytic_export_task @@ -51,8 +51,8 @@ def get(self, request, slug): if ( not x_axis or not y_axis - or not x_axis in valid_xaxis_segment - or not y_axis in valid_yaxis + or x_axis not in valid_xaxis_segment + or y_axis not in valid_yaxis ): return Response( { @@ -266,8 +266,8 @@ def post(self, request, slug): if ( not x_axis or not y_axis - or not x_axis in valid_xaxis_segment - or not y_axis in valid_yaxis + or x_axis not in valid_xaxis_segment + or y_axis not in valid_yaxis ): return Response( { diff --git a/apiserver/plane/app/views/api.py b/apiserver/plane/app/views/api.py index 86a29c7fa50..6cd349b0748 100644 --- a/apiserver/plane/app/views/api.py +++ b/apiserver/plane/app/views/api.py @@ -43,7 +43,7 @@ def post(self, request, slug): ) def get(self, request, slug, pk=None): - if pk == None: + if pk is None: api_tokens = APIToken.objects.filter( user=request.user, workspace__slug=slug ) diff --git a/apiserver/plane/app/views/asset.py b/apiserver/plane/app/views/asset/base.py similarity index 98% rename from apiserver/plane/app/views/asset.py rename to apiserver/plane/app/views/asset/base.py index fb559061011..6de4a4ee7f0 100644 --- a/apiserver/plane/app/views/asset.py +++ b/apiserver/plane/app/views/asset/base.py @@ -4,7 +4,7 @@ from rest_framework.parsers import MultiPartParser, FormParser, JSONParser # Module imports -from .base import BaseAPIView, BaseViewSet +from ..base import BaseAPIView, BaseViewSet from plane.db.models import FileAsset, Workspace from plane.app.serializers import FileAssetSerializer diff --git a/apiserver/plane/app/views/auth_extended.py b/apiserver/plane/app/views/auth_extended.py index 29cb43e3865..896f4170f22 100644 --- a/apiserver/plane/app/views/auth_extended.py +++ b/apiserver/plane/app/views/auth_extended.py @@ -16,7 +16,6 @@ from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode from 
django.core.validators import validate_email from django.core.exceptions import ValidationError -from django.conf import settings ## Third Party Imports from rest_framework import status @@ -172,7 +171,7 @@ def post(self, request, uidb64, token): serializer.errors, status=status.HTTP_400_BAD_REQUEST ) - except DjangoUnicodeDecodeError as indentifier: + except DjangoUnicodeDecodeError: return Response( {"error": "token is not valid, please check the new one"}, status=status.HTTP_401_UNAUTHORIZED, diff --git a/apiserver/plane/app/views/authentication.py b/apiserver/plane/app/views/authentication.py index c2b3e0b7e4e..7d898f971cf 100644 --- a/apiserver/plane/app/views/authentication.py +++ b/apiserver/plane/app/views/authentication.py @@ -7,7 +7,6 @@ from django.utils import timezone from django.core.exceptions import ValidationError from django.core.validators import validate_email -from django.conf import settings from django.contrib.auth.hashers import make_password # Third party imports @@ -65,7 +64,7 @@ def post(self, request): email = email.strip().lower() try: validate_email(email) - except ValidationError as e: + except ValidationError: return Response( {"error": "Please provide a valid email address."}, status=status.HTTP_400_BAD_REQUEST, @@ -151,7 +150,7 @@ def post(self, request): email = email.strip().lower() try: validate_email(email) - except ValidationError as e: + except ValidationError: return Response( {"error": "Please provide a valid email address."}, status=status.HTTP_400_BAD_REQUEST, @@ -238,9 +237,11 @@ def post(self, request): [ WorkspaceMember( workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) @@ -254,9 +255,11 @@ def post(self, request): [ ProjectMember( 
workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) @@ -392,9 +395,11 @@ def post(self, request): [ WorkspaceMember( workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) @@ -408,9 +413,11 @@ def post(self, request): [ ProjectMember( workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) diff --git a/apiserver/plane/app/views/base.py b/apiserver/plane/app/views/base.py index fa1e7559b06..1908cfdc951 100644 --- a/apiserver/plane/app/views/base.py +++ b/apiserver/plane/app/views/base.py @@ -1,30 +1,27 @@ # Python imports import zoneinfo -import json +from django.conf import settings +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.db import IntegrityError # Django imports from django.urls import resolve -from django.conf import settings from django.utils import timezone -from django.db import IntegrityError -from django.core.exceptions import ObjectDoesNotExist, ValidationError -from django.core.serializers.json import DjangoJSONEncoder +from django_filters.rest_framework import DjangoFilterBackend # Third part imports from rest_framework import status -from rest_framework import status -from rest_framework.viewsets import ModelViewSet -from rest_framework.response import Response from 
rest_framework.exceptions import APIException -from rest_framework.views import APIView from rest_framework.filters import SearchFilter from rest_framework.permissions import IsAuthenticated -from sentry_sdk import capture_exception -from django_filters.rest_framework import DjangoFilterBackend +from rest_framework.response import Response +from rest_framework.views import APIView +from rest_framework.viewsets import ModelViewSet # Module imports -from plane.utils.paginator import BasePaginator from plane.bgtasks.webhook_task import send_webhook +from plane.utils.exception_logger import log_exception +from plane.utils.paginator import BasePaginator class TimezoneMixin: @@ -90,7 +87,7 @@ def get_queryset(self): try: return self.model.objects.all() except Exception as e: - capture_exception(e) + log_exception(e) raise APIException( "Please check the view", status.HTTP_400_BAD_REQUEST ) @@ -119,18 +116,18 @@ def handle_exception(self, exc): if isinstance(e, ObjectDoesNotExist): return Response( - {"error": f"The required object does not exist."}, + {"error": "The required object does not exist."}, status=status.HTTP_404_NOT_FOUND, ) if isinstance(e, KeyError): - capture_exception(e) + log_exception(e) return Response( - {"error": f"The required key does not exist."}, + {"error": "The required key does not exist."}, status=status.HTTP_400_BAD_REQUEST, ) - capture_exception(e) + log_exception(e) return Response( {"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR, @@ -226,19 +223,17 @@ def handle_exception(self, exc): if isinstance(e, ObjectDoesNotExist): return Response( - {"error": f"The required object does not exist."}, + {"error": "The required object does not exist."}, status=status.HTTP_404_NOT_FOUND, ) if isinstance(e, KeyError): return Response( - {"error": f"The required key does not exist."}, + {"error": "The required key does not exist."}, status=status.HTTP_400_BAD_REQUEST, ) - if settings.DEBUG: - print(e) 
- capture_exception(e) + log_exception(e) return Response( {"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR, diff --git a/apiserver/plane/app/views/config.py b/apiserver/plane/app/views/config.py index b4c68879d9e..516570b775a 100644 --- a/apiserver/plane/app/views/config.py +++ b/apiserver/plane/app/views/config.py @@ -2,7 +2,6 @@ import os # Django imports -from django.conf import settings # Third party imports from rest_framework.permissions import AllowAny @@ -12,13 +11,14 @@ # Module imports from .base import BaseAPIView from plane.license.utils.instance_value import get_configuration_value - +from plane.utils.cache import cache_response class ConfigurationEndpoint(BaseAPIView): permission_classes = [ AllowAny, ] + @cache_response(60 * 60 * 2, user=False) def get(self, request): # Get all the configuration ( @@ -183,6 +183,7 @@ class MobileConfigurationEndpoint(BaseAPIView): AllowAny, ] + @cache_response(60 * 60 * 2, user=False) def get(self, request): ( GOOGLE_CLIENT_ID, diff --git a/apiserver/plane/app/views/cycle.py b/apiserver/plane/app/views/cycle/base.py similarity index 81% rename from apiserver/plane/app/views/cycle.py rename to apiserver/plane/app/views/cycle/base.py index 85e1e9f2e84..d323a0f631f 100644 --- a/apiserver/plane/app/views/cycle.py +++ b/apiserver/plane/app/views/cycle/base.py @@ -1,63 +1,56 @@ # Python imports import json +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField + # Django imports from django.db.models import ( - Func, - F, - Q, + Case, + CharField, + Count, Exists, + F, + Func, OuterRef, - Count, Prefetch, - Sum, - Case, - When, + Q, + UUIDField, Value, - CharField, + When, ) -from django.core import serializers -from django.utils import timezone -from django.utils.decorators import method_decorator -from django.views.decorators.gzip import gzip_page -from django.contrib.postgres.aggregates import ArrayAgg -from 
django.contrib.postgres.fields import ArrayField -from django.db.models import Value, UUIDField from django.db.models.functions import Coalesce +from django.utils import timezone +from rest_framework import status # Third party imports from rest_framework.response import Response -from rest_framework import status -# Module imports -from . import BaseViewSet, BaseAPIView, WebhookMixin -from plane.app.serializers import ( - CycleSerializer, - CycleIssueSerializer, - CycleFavoriteSerializer, - IssueSerializer, - CycleWriteSerializer, - CycleUserPropertiesSerializer, -) from plane.app.permissions import ( ProjectEntityPermission, ProjectLitePermission, ) +from plane.app.serializers import ( + CycleFavoriteSerializer, + CycleSerializer, + CycleUserPropertiesSerializer, + CycleWriteSerializer, +) +from plane.bgtasks.issue_activites_task import issue_activity from plane.db.models import ( - User, Cycle, + CycleFavorite, CycleIssue, + CycleUserProperties, Issue, - CycleFavorite, - IssueLink, - IssueAttachment, Label, - CycleUserProperties, + User, ) -from plane.bgtasks.issue_activites_task import issue_activity -from plane.utils.issue_filters import issue_filters from plane.utils.analytics_plot import burndown_plot +# Module imports +from .. 
import BaseAPIView, BaseViewSet, WebhookMixin + class CycleViewSet(WebhookMixin, BaseViewSet): serializer_class = CycleSerializer @@ -89,6 +82,7 @@ def get_queryset(self): project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) + .filter(project__archived_at__isnull=True) .select_related("project", "workspace", "owned_by") .prefetch_related( Prefetch( @@ -109,7 +103,8 @@ def get_queryset(self): .annotate(is_favorite=Exists(favorite_subquery)) .annotate( total_issues=Count( - "issue_cycle", + "issue_cycle__issue__id", + distinct=True, filter=Q( issue_cycle__issue__archived_at__isnull=True, issue_cycle__issue__is_draft=False, @@ -118,7 +113,8 @@ def get_queryset(self): ) .annotate( completed_issues=Count( - "issue_cycle__issue__state__group", + "issue_cycle__issue__id", + distinct=True, filter=Q( issue_cycle__issue__state__group="completed", issue_cycle__issue__archived_at__isnull=True, @@ -128,7 +124,8 @@ def get_queryset(self): ) .annotate( cancelled_issues=Count( - "issue_cycle__issue__state__group", + "issue_cycle__issue__id", + distinct=True, filter=Q( issue_cycle__issue__state__group="cancelled", issue_cycle__issue__archived_at__isnull=True, @@ -138,7 +135,8 @@ def get_queryset(self): ) .annotate( started_issues=Count( - "issue_cycle__issue__state__group", + "issue_cycle__issue__id", + distinct=True, filter=Q( issue_cycle__issue__state__group="started", issue_cycle__issue__archived_at__isnull=True, @@ -148,7 +146,8 @@ def get_queryset(self): ) .annotate( unstarted_issues=Count( - "issue_cycle__issue__state__group", + "issue_cycle__issue__id", + distinct=True, filter=Q( issue_cycle__issue__state__group="unstarted", issue_cycle__issue__archived_at__isnull=True, @@ -158,7 +157,8 @@ def get_queryset(self): ) .annotate( backlog_issues=Count( - "issue_cycle__issue__state__group", + "issue_cycle__issue__id", + distinct=True, filter=Q( issue_cycle__issue__state__group="backlog", 
issue_cycle__issue__archived_at__isnull=True, @@ -202,7 +202,7 @@ def get_queryset(self): ) def list(self, request, slug, project_id): - queryset = self.get_queryset() + queryset = self.get_queryset().filter(archived_at__isnull=True) cycle_view = request.GET.get("cycle_view", "all") # Update the order by @@ -403,8 +403,8 @@ def create(self, request, slug, project_id): "progress_snapshot", # meta fields "is_favorite", - "total_issues", "cancelled_issues", + "total_issues", "completed_issues", "started_issues", "unstarted_issues", @@ -427,11 +427,15 @@ def create(self, request, slug, project_id): ) def partial_update(self, request, slug, project_id, pk): - queryset = ( - self.get_queryset() - .filter(workspace__slug=slug, project_id=project_id, pk=pk) + queryset = self.get_queryset().filter( + workspace__slug=slug, project_id=project_id, pk=pk ) cycle = queryset.first() + if cycle.archived_at: + return Response( + {"error": "Archived cycle cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) request_data = request.data if ( @@ -489,10 +493,22 @@ def partial_update(self, request, slug, project_id, pk): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def retrieve(self, request, slug, project_id, pk): - queryset = self.get_queryset().filter(pk=pk) + queryset = ( + self.get_queryset().filter(archived_at__isnull=True).filter(pk=pk) + ) data = ( self.get_queryset() .filter(pk=pk) + .annotate( + sub_issues=Issue.issue_objects.filter( + project_id=self.kwargs.get("project_id"), + parent__isnull=False, + issue_cycle__cycle_id=pk, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) .values( # necessary fields "id", @@ -509,6 +525,7 @@ def retrieve(self, request, slug, project_id, pk): "external_source", "external_id", "progress_snapshot", + "sub_issues", # meta fields "is_favorite", "total_issues", @@ -662,273 +679,194 @@ def destroy(self, request, slug, project_id, pk): return 
Response(status=status.HTTP_204_NO_CONTENT) -class CycleIssueViewSet(WebhookMixin, BaseViewSet): - serializer_class = CycleIssueSerializer - model = CycleIssue - - webhook_event = "cycle_issue" - bulk = True +class CycleArchiveUnarchiveEndpoint(BaseAPIView): permission_classes = [ ProjectEntityPermission, ] - filterset_fields = [ - "issue__labels__id", - "issue__assignees__id", - ] - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("issue_id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(workspace__slug=self.kwargs.get("slug")) + favorite_subquery = CycleFavorite.objects.filter( + user=self.request.user, + cycle_id=OuterRef("pk"), + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + ) + return ( + Cycle.objects.filter(workspace__slug=self.kwargs.get("slug")) .filter(project_id=self.kwargs.get("project_id")) + .filter(archived_at__isnull=False) .filter( project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, ) - .filter(cycle_id=self.kwargs.get("cycle_id")) - .select_related("project") - .select_related("workspace") - .select_related("cycle") - .select_related("issue", "issue__state", "issue__project") - .prefetch_related("issue__assignees", "issue__labels") - .distinct() - ) - - @method_decorator(gzip_page) - def list(self, request, slug, project_id, cycle_id): - fields = [ - field - for field in request.GET.get("fields", "").split(",") - if field - ] - order_by = request.GET.get("order_by", "created_at") - filters = issue_filters(request.query_params, "GET") - queryset = ( - Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) - .filter(project_id=project_id) - .filter(workspace__slug=slug) - .filter(**filters) - .select_related("workspace", "project", "state", "parent") + 
.filter(project__archived_at__isnull=True) + .select_related("project", "workspace", "owned_by") + .prefetch_related( + Prefetch( + "issue_cycle__issue__assignees", + queryset=User.objects.only( + "avatar", "first_name", "id" + ).distinct(), + ) + ) .prefetch_related( - "assignees", - "labels", - "issue_module__module", - "issue_cycle__cycle", + Prefetch( + "issue_cycle__issue__labels", + queryset=Label.objects.only( + "name", "color", "id" + ).distinct(), + ) ) - .order_by(order_by) - .filter(**filters) - .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(is_favorite=Exists(favorite_subquery)) .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + completed_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) ) .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") + cancelled_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") + started_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), + unstarted_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="unstarted", + 
issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, ), - Value([], output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), + ) + ) + .annotate( + backlog_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="backlog", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( + ) + ) + .annotate( + status=Case( + When( + Q(start_date__lte=timezone.now()) + & Q(end_date__gte=timezone.now()), + then=Value("CURRENT"), + ), + When( + start_date__gt=timezone.now(), then=Value("UPCOMING") + ), + When(end_date__lt=timezone.now(), then=Value("COMPLETED")), + When( + Q(start_date__isnull=True) & Q(end_date__isnull=True), + then=Value("DRAFT"), + ), + default=Value("DRAFT"), + output_field=CharField(), + ) + ) + .annotate( + assignee_ids=Coalesce( ArrayAgg( - "issue_module__module_id", + "issue_cycle__issue__assignees__id", distinct=True, - filter=~Q(issue_module__module_id__isnull=True), + filter=~Q( + issue_cycle__issue__assignees__id__isnull=True + ) + & Q( + issue_cycle__issue__assignees__member_project__is_active=True + ), ), Value([], output_field=ArrayField(UUIDField())), - ), + ) ) - .order_by(order_by) + .order_by("-is_favorite", "name") + .distinct() ) - if self.fields: - issues = IssueSerializer( - queryset, many=True, fields=fields if fields else None - ).data - else: - issues = queryset.values( + + def get(self, request, slug, project_id): + queryset = ( + self.get_queryset() + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .values( + # necessary fields "id", + "workspace_id", + "project_id", + # model fields "name", - "state_id", - "sort_order", - "completed_at", 
- "estimate_point", - "priority", + "description", "start_date", - "target_date", - "sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", + "end_date", + "owned_by_id", + "view_props", + "sort_order", + "external_source", + "external_id", + "progress_snapshot", + # meta fields + "total_issues", + "is_favorite", + "cancelled_issues", + "completed_issues", + "started_issues", + "unstarted_issues", + "backlog_issues", "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", + "status", "archived_at", ) - return Response(issues, status=status.HTTP_200_OK) - - def create(self, request, slug, project_id, cycle_id): - issues = request.data.get("issues", []) - - if not issues: - return Response( - {"error": "Issues are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) + ).order_by("-is_favorite", "-created_at") + return Response(queryset, status=status.HTTP_200_OK) + def post(self, request, slug, project_id, cycle_id): cycle = Cycle.objects.get( - workspace__slug=slug, project_id=project_id, pk=cycle_id - ) - - if ( - cycle.end_date is not None - and cycle.end_date < timezone.now().date() - ): - return Response( - { - "error": "The Cycle has already been completed so no new issues can be added" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get all CycleIssues already created - cycle_issues = list( - CycleIssue.objects.filter( - ~Q(cycle_id=cycle_id), issue_id__in=issues - ) + pk=cycle_id, project_id=project_id, workspace__slug=slug ) - existing_issues = [ - str(cycle_issue.issue_id) for cycle_issue in cycle_issues - ] - new_issues = list(set(issues) - set(existing_issues)) - - # New issues to create - created_records = CycleIssue.objects.bulk_create( - [ - CycleIssue( - project_id=project_id, - workspace_id=cycle.workspace_id, - created_by_id=request.user.id, - updated_by_id=request.user.id, - cycle_id=cycle_id, - issue_id=issue, 
- ) - for issue in new_issues - ], - batch_size=10, - ) - - # Updated Issues - updated_records = [] - update_cycle_issue_activity = [] - # Iterate over each cycle_issue in cycle_issues - for cycle_issue in cycle_issues: - # Update the cycle_issue's cycle_id - cycle_issue.cycle_id = cycle_id - # Add the modified cycle_issue to the records_to_update list - updated_records.append(cycle_issue) - # Record the update activity - update_cycle_issue_activity.append( - { - "old_cycle_id": str(cycle_issue.cycle_id), - "new_cycle_id": str(cycle_id), - "issue_id": str(cycle_issue.issue_id), - } - ) - - # Update the cycle issues - CycleIssue.objects.bulk_update(updated_records, ["cycle_id"], batch_size=100) - # Capture Issue Activity - issue_activity.delay( - type="cycle.activity.created", - requested_data=json.dumps({"cycles_list": issues}), - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "updated_cycle_issues": update_cycle_issue_activity, - "created_cycle_issues": serializers.serialize( - "json", created_records - ), - } - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), + cycle.archived_at = timezone.now() + cycle.save() + return Response( + {"archived_at": str(cycle.archived_at)}, + status=status.HTTP_200_OK, ) - return Response({"message": "success"}, status=status.HTTP_201_CREATED) - def destroy(self, request, slug, project_id, cycle_id, issue_id): - cycle_issue = CycleIssue.objects.get( - issue_id=issue_id, - workspace__slug=slug, - project_id=project_id, - cycle_id=cycle_id, - ) - issue_activity.delay( - type="cycle.activity.deleted", - requested_data=json.dumps( - { - "cycle_id": str(self.kwargs.get("cycle_id")), - "issues": [str(issue_id)], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(issue_id), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - 
epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), + def delete(self, request, slug, project_id, cycle_id): + cycle = Cycle.objects.get( + pk=cycle_id, project_id=project_id, workspace__slug=slug ) - cycle_issue.delete() + cycle.archived_at = None + cycle.save() return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/cycle/issue.py b/apiserver/plane/app/views/cycle/issue.py new file mode 100644 index 00000000000..2a5505dd055 --- /dev/null +++ b/apiserver/plane/app/views/cycle/issue.py @@ -0,0 +1,314 @@ +# Python imports +import json + +# Django imports +from django.db.models import ( + Func, + F, + Q, + OuterRef, + Value, + UUIDField, +) +from django.core import serializers +from django.utils import timezone +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db.models.functions import Coalesce + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseViewSet, WebhookMixin +from plane.app.serializers import ( + IssueSerializer, + CycleIssueSerializer, +) +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + Cycle, + CycleIssue, + Issue, + IssueLink, + IssueAttachment, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.issue_filters import issue_filters + + +class CycleIssueViewSet(WebhookMixin, BaseViewSet): + serializer_class = CycleIssueSerializer + model = CycleIssue + + webhook_event = "cycle_issue" + bulk = True + + permission_classes = [ + ProjectEntityPermission, + ] + + filterset_fields = [ + "issue__labels__id", + "issue__assignees__id", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("issue_id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + .filter(project__archived_at__isnull=True) + .filter(cycle_id=self.kwargs.get("cycle_id")) + .select_related("project") + .select_related("workspace") + .select_related("cycle") + .select_related("issue", "issue__state", "issue__project") + .prefetch_related("issue__assignees", "issue__labels") + .distinct() + ) + + @method_decorator(gzip_page) + def list(self, request, slug, project_id, cycle_id): + fields = [ + field + for field in request.GET.get("fields", "").split(",") + if field + ] + order_by = request.GET.get("order_by", "created_at") + filters = issue_filters(request.query_params, "GET") + queryset = ( + Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .filter(**filters) + .select_related("workspace", "project", 
"state", "parent") + .prefetch_related( + "assignees", + "labels", + "issue_module__module", + "issue_cycle__cycle", + ) + .order_by(order_by) + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + .order_by(order_by) + ) + if self.fields: + issues = IssueSerializer( + queryset, many=True, fields=fields if fields else None + ).data + else: + issues = queryset.values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + "sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + return Response(issues, status=status.HTTP_200_OK) + + def create(self, request, slug, 
project_id, cycle_id): + issues = request.data.get("issues", []) + + if not issues: + return Response( + {"error": "Issues are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=cycle_id + ) + + if ( + cycle.end_date is not None + and cycle.end_date < timezone.now().date() + ): + return Response( + { + "error": "The Cycle has already been completed so no new issues can be added" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get all CycleIssues already created + cycle_issues = list( + CycleIssue.objects.filter( + ~Q(cycle_id=cycle_id), issue_id__in=issues + ) + ) + existing_issues = [ + str(cycle_issue.issue_id) for cycle_issue in cycle_issues + ] + new_issues = list(set(issues) - set(existing_issues)) + + # New issues to create + created_records = CycleIssue.objects.bulk_create( + [ + CycleIssue( + project_id=project_id, + workspace_id=cycle.workspace_id, + created_by_id=request.user.id, + updated_by_id=request.user.id, + cycle_id=cycle_id, + issue_id=issue, + ) + for issue in new_issues + ], + batch_size=10, + ) + + # Updated Issues + updated_records = [] + update_cycle_issue_activity = [] + # Iterate over each cycle_issue in cycle_issues + for cycle_issue in cycle_issues: + # Update the cycle_issue's cycle_id + cycle_issue.cycle_id = cycle_id + # Add the modified cycle_issue to the records_to_update list + updated_records.append(cycle_issue) + # Record the update activity + update_cycle_issue_activity.append( + { + "old_cycle_id": str(cycle_issue.cycle_id), + "new_cycle_id": str(cycle_id), + "issue_id": str(cycle_issue.issue_id), + } + ) + + # Update the cycle issues + CycleIssue.objects.bulk_update( + updated_records, ["cycle_id"], batch_size=100 + ) + # Capture Issue Activity + issue_activity.delay( + type="cycle.activity.created", + requested_data=json.dumps({"cycles_list": issues}), + actor_id=str(self.request.user.id), + issue_id=None, + 
project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "updated_cycle_issues": update_cycle_issue_activity, + "created_cycle_issues": serializers.serialize( + "json", created_records + ), + } + ), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response({"message": "success"}, status=status.HTTP_201_CREATED) + + def destroy(self, request, slug, project_id, cycle_id, issue_id): + cycle_issue = CycleIssue.objects.get( + issue_id=issue_id, + workspace__slug=slug, + project_id=project_id, + cycle_id=cycle_id, + ) + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(self.kwargs.get("cycle_id")), + "issues": [str(issue_id)], + } + ), + actor_id=str(self.request.user.id), + issue_id=str(issue_id), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + cycle_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/dashboard.py b/apiserver/plane/app/views/dashboard/base.py similarity index 84% rename from apiserver/plane/app/views/dashboard.py rename to apiserver/plane/app/views/dashboard/base.py index 62ce0d910fe..33b3cf9d5b9 100644 --- a/apiserver/plane/app/views/dashboard.py +++ b/apiserver/plane/app/views/dashboard/base.py @@ -9,15 +9,15 @@ F, Exists, OuterRef, - Max, Subquery, JSONField, Func, Prefetch, + IntegerField, ) from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.fields import ArrayField -from django.db.models import Value, UUIDField +from django.db.models import UUIDField from django.db.models.functions import Coalesce from django.utils import timezone @@ -26,7 +26,7 @@ from rest_framework import status # Module imports -from . import BaseAPIView +from .. 
import BaseAPIView from plane.db.models import ( Issue, IssueActivity, @@ -38,6 +38,7 @@ IssueLink, IssueAttachment, IssueRelation, + User, ) from plane.app.serializers import ( IssueActivitySerializer, @@ -148,7 +149,8 @@ def dashboard_assigned_issues(self, request, slug): ArrayAgg( "assignees__id", distinct=True, - filter=~Q(assignees__id__isnull=True), + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), ), Value([], output_field=ArrayField(UUIDField())), ), @@ -212,11 +214,11 @@ def dashboard_assigned_issues(self, request, slug): if issue_type == "overdue": overdue_issues_count = assigned_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__lt=timezone.now() + target_date__lt=timezone.now(), ).count() overdue_issues = assigned_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__lt=timezone.now() + target_date__lt=timezone.now(), )[:5] return Response( { @@ -231,11 +233,11 @@ def dashboard_assigned_issues(self, request, slug): if issue_type == "upcoming": upcoming_issues_count = assigned_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__gte=timezone.now() + target_date__gte=timezone.now(), ).count() upcoming_issues = assigned_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__gte=timezone.now() + target_date__gte=timezone.now(), )[:5] return Response( { @@ -302,7 +304,8 @@ def dashboard_created_issues(self, request, slug): ArrayAgg( "assignees__id", distinct=True, - filter=~Q(assignees__id__isnull=True), + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), ), Value([], output_field=ArrayField(UUIDField())), ), @@ -365,11 +368,11 @@ def dashboard_created_issues(self, request, slug): if issue_type == "overdue": overdue_issues_count = created_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__lt=timezone.now() + 
target_date__lt=timezone.now(), ).count() overdue_issues = created_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__lt=timezone.now() + target_date__lt=timezone.now(), )[:5] return Response( { @@ -382,11 +385,11 @@ def dashboard_created_issues(self, request, slug): if issue_type == "upcoming": upcoming_issues_count = created_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__gte=timezone.now() + target_date__gte=timezone.now(), ).count() upcoming_issues = created_issues.filter( state__group__in=["backlog", "unstarted", "started"], - target_date__gte=timezone.now() + target_date__gte=timezone.now(), )[:5] return Response( { @@ -470,6 +473,7 @@ def dashboard_recent_activity(self, request, slug): workspace__slug=slug, project__project_projectmember__member=request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, actor=request.user, ).select_related("actor", "workspace", "issue", "project")[:8] @@ -485,6 +489,7 @@ def dashboard_recent_projects(self, request, slug): workspace__slug=slug, project__project_projectmember__member=request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, actor=request.user, ) .values_list("project_id", flat=True) @@ -499,11 +504,14 @@ def dashboard_recent_projects(self, request, slug): additional_projects = Project.objects.filter( project_projectmember__member=request.user, project_projectmember__is_active=True, + archived_at__isnull=True, workspace__slug=slug, ).exclude(id__in=unique_project_ids) # Append additional project IDs to the existing list - unique_project_ids.update(additional_projects.values_list("id", flat=True)) + unique_project_ids.update( + additional_projects.values_list("id", flat=True) + ) return Response( list(unique_project_ids)[:4], @@ -512,90 +520,99 @@ def dashboard_recent_projects(self, request, slug): def dashboard_recent_collaborators(self, request, slug): - # Fetch 
all project IDs where the user belongs to - user_projects = Project.objects.filter( - project_projectmember__member=request.user, - project_projectmember__is_active=True, - workspace__slug=slug, - ).values_list("id", flat=True) - - # Fetch all users who have performed an activity in the projects where the user exists - users_with_activities = ( + # Subquery to count activities for each project member + activity_count_subquery = ( IssueActivity.objects.filter( workspace__slug=slug, - project_id__in=user_projects, + actor=OuterRef("member"), + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, ) .values("actor") - .exclude(actor=request.user) - .annotate(num_activities=Count("actor")) - .order_by("-num_activities") - )[:7] - - # Get the count of active issues for each user in users_with_activities - users_with_active_issues = [] - for user_activity in users_with_activities: - user_id = user_activity["actor"] - active_issue_count = Issue.objects.filter( - assignees__in=[user_id], - state__group__in=["unstarted", "started"], - ).count() - users_with_active_issues.append( - {"user_id": user_id, "active_issue_count": active_issue_count} + .annotate(num_activities=Count("pk")) + .values("num_activities") + ) + + # Get all project members and annotate them with activity counts + project_members_with_activities = ( + ProjectMember.objects.filter( + workspace__slug=slug, + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .annotate( + num_activities=Coalesce( + Subquery(activity_count_subquery), + Value(0), + output_field=IntegerField(), + ), + is_current_user=Case( + When(member=request.user, then=Value(0)), + default=Value(1), + output_field=IntegerField(), + ), + ) + .values_list("member", flat=True) + .order_by("is_current_user", "-num_activities") + .distinct() + ) + search = 
request.query_params.get("search", None) + if search: + project_members_with_activities = ( + project_members_with_activities.filter( + Q(member__display_name__icontains=search) + | Q(member__first_name__icontains=search) + | Q(member__last_name__icontains=search) + ) ) - # Insert the logged-in user's ID and their active issue count at the beginning - active_issue_count = Issue.objects.filter( - assignees__in=[request.user], - state__group__in=["unstarted", "started"], - ).count() + return self.paginate( + request=request, + queryset=project_members_with_activities, + controller=self.get_results_controller, + ) - if users_with_activities.count() < 7: - # Calculate the additional collaborators needed - additional_collaborators_needed = 7 - users_with_activities.count() - - # Fetch additional collaborators from the project_member table - additional_collaborators = list( - set( - ProjectMember.objects.filter( - ~Q(member=request.user), - project_id__in=user_projects, - workspace__slug=slug, - ) - .exclude( - member__in=[ - user["actor"] for user in users_with_activities - ] + +class DashboardEndpoint(BaseAPIView): + def get_results_controller(self, project_members_with_activities): + user_active_issue_counts = ( + User.objects.filter(id__in=project_members_with_activities) + .annotate( + active_issue_count=Count( + Case( + When( + issue_assignee__issue__state__group__in=[ + "unstarted", + "started", + ], + then=1, + ), + output_field=IntegerField(), + ) ) - .values_list("member", flat=True) ) + .values("active_issue_count", user_id=F("id")) ) + # Create a dictionary to store the active issue counts by user ID + active_issue_counts_dict = { + user["user_id"]: user["active_issue_count"] + for user in user_active_issue_counts + } - additional_collaborators = additional_collaborators[ - :additional_collaborators_needed + # Preserve the sequence of project members with activities + paginated_results = [ + { + "user_id": member_id, + "active_issue_count": 
active_issue_counts_dict.get( + member_id, 0 + ), + } + for member_id in project_members_with_activities ] + return paginated_results - # Append additional collaborators to the list - for collaborator_id in additional_collaborators: - active_issue_count = Issue.objects.filter( - assignees__in=[collaborator_id], - state__group__in=["unstarted", "started"], - ).count() - users_with_active_issues.append( - { - "user_id": str(collaborator_id), - "active_issue_count": active_issue_count, - } - ) - - users_with_active_issues.insert( - 0, - {"user_id": request.user.id, "active_issue_count": active_issue_count}, - ) - - return Response(users_with_active_issues, status=status.HTTP_200_OK) - - -class DashboardEndpoint(BaseAPIView): def create(self, request, slug): serializer = DashboardSerializer(data=request.data) if serializer.is_valid(): @@ -622,7 +639,9 @@ def get(self, request, slug, dashboard_id=None): dashboard_type = request.GET.get("dashboard_type", None) if dashboard_type == "home": dashboard, created = Dashboard.objects.get_or_create( - type_identifier=dashboard_type, owned_by=request.user, is_default=True + type_identifier=dashboard_type, + owned_by=request.user, + is_default=True, ) if created: @@ -639,7 +658,9 @@ def get(self, request, slug, dashboard_id=None): updated_dashboard_widgets = [] for widget_key in widgets_to_fetch: - widget = Widget.objects.filter(key=widget_key).values_list("id", flat=True) + widget = Widget.objects.filter( + key=widget_key + ).values_list("id", flat=True) if widget: updated_dashboard_widgets.append( DashboardWidget( diff --git a/apiserver/plane/app/views/error_404.py b/apiserver/plane/app/views/error_404.py new file mode 100644 index 00000000000..3c31474e0a5 --- /dev/null +++ b/apiserver/plane/app/views/error_404.py @@ -0,0 +1,5 @@ +# views.py +from django.http import JsonResponse + +def custom_404_view(request, exception=None): + return JsonResponse({"error": "Page not found."}, status=404) diff --git 
a/apiserver/plane/app/views/estimate.py b/apiserver/plane/app/views/estimate/base.py similarity index 94% rename from apiserver/plane/app/views/estimate.py rename to apiserver/plane/app/views/estimate/base.py index 3402bb06864..7ac3035a956 100644 --- a/apiserver/plane/app/views/estimate.py +++ b/apiserver/plane/app/views/estimate/base.py @@ -3,7 +3,7 @@ from rest_framework import status # Module imports -from .base import BaseViewSet, BaseAPIView +from ..base import BaseViewSet, BaseAPIView from plane.app.permissions import ProjectEntityPermission from plane.db.models import Project, Estimate, EstimatePoint from plane.app.serializers import ( @@ -11,7 +11,7 @@ EstimatePointSerializer, EstimateReadSerializer, ) - +from plane.utils.cache import invalidate_cache class ProjectEstimatePointEndpoint(BaseAPIView): permission_classes = [ @@ -49,6 +49,7 @@ def list(self, request, slug, project_id): serializer = EstimateReadSerializer(estimates, many=True) return Response(serializer.data, status=status.HTTP_200_OK) + @invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False) def create(self, request, slug, project_id): if not request.data.get("estimate", False): return Response( @@ -114,6 +115,7 @@ def retrieve(self, request, slug, project_id, estimate_id): status=status.HTTP_200_OK, ) + @invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False) def partial_update(self, request, slug, project_id, estimate_id): if not request.data.get("estimate", False): return Response( @@ -182,6 +184,7 @@ def partial_update(self, request, slug, project_id, estimate_id): status=status.HTTP_200_OK, ) + @invalidate_cache(path="/api/workspaces/:slug/estimates/", url_params=True, user=False) def destroy(self, request, slug, project_id, estimate_id): estimate = Estimate.objects.get( pk=estimate_id, workspace__slug=slug, project_id=project_id diff --git a/apiserver/plane/app/views/exporter.py b/apiserver/plane/app/views/exporter/base.py 
similarity index 88% rename from apiserver/plane/app/views/exporter.py rename to apiserver/plane/app/views/exporter/base.py index 179de81f97e..698d9eb997f 100644 --- a/apiserver/plane/app/views/exporter.py +++ b/apiserver/plane/app/views/exporter/base.py @@ -3,7 +3,7 @@ from rest_framework import status # Module imports -from . import BaseAPIView +from .. import BaseAPIView from plane.app.permissions import WorkSpaceAdminPermission from plane.bgtasks.export_task import issue_export_task from plane.db.models import Project, ExporterHistory, Workspace @@ -29,7 +29,10 @@ def post(self, request, slug): if provider in ["csv", "xlsx", "json"]: if not project_ids: project_ids = Project.objects.filter( - workspace__slug=slug + workspace__slug=slug, + project_projectmember__member=request.user, + project_projectmember__is_active=True, + archived_at__isnull=True, ).values_list("id", flat=True) project_ids = [str(project_id) for project_id in project_ids] @@ -50,7 +53,7 @@ def post(self, request, slug): ) return Response( { - "message": f"Once the export is ready you will be able to download it" + "message": "Once the export is ready you will be able to download it" }, status=status.HTTP_200_OK, ) diff --git a/apiserver/plane/app/views/external.py b/apiserver/plane/app/views/external/base.py similarity index 91% rename from apiserver/plane/app/views/external.py rename to apiserver/plane/app/views/external/base.py index 618c65e3ccb..2d5d2c7aa4b 100644 --- a/apiserver/plane/app/views/external.py +++ b/apiserver/plane/app/views/external/base.py @@ -8,17 +8,15 @@ from rest_framework import status # Django imports -from django.conf import settings # Module imports -from .base import BaseAPIView +from ..base import BaseAPIView from plane.app.permissions import ProjectEntityPermission from plane.db.models import Workspace, Project from plane.app.serializers import ( ProjectLiteSerializer, WorkspaceLiteSerializer, ) -from plane.utils.integrations.github import get_release_notes from 
plane.license.utils.instance_value import get_configuration_value @@ -85,12 +83,6 @@ def post(self, request, slug, project_id): ) -class ReleaseNotesEndpoint(BaseAPIView): - def get(self, request): - release_notes = get_release_notes() - return Response(release_notes, status=status.HTTP_200_OK) - - class UnsplashEndpoint(BaseAPIView): def get(self, request): (UNSPLASH_ACCESS_KEY,) = get_configuration_value( diff --git a/apiserver/plane/app/views/importer.py b/apiserver/plane/app/views/importer.py deleted file mode 100644 index a15ed36b761..00000000000 --- a/apiserver/plane/app/views/importer.py +++ /dev/null @@ -1,558 +0,0 @@ -# Python imports -import uuid - -# Third party imports -from rest_framework import status -from rest_framework.response import Response - -# Django imports -from django.db.models import Max, Q - -# Module imports -from plane.app.views import BaseAPIView -from plane.db.models import ( - WorkspaceIntegration, - Importer, - APIToken, - Project, - State, - IssueSequence, - Issue, - IssueActivity, - IssueComment, - IssueLink, - IssueLabel, - Workspace, - IssueAssignee, - Module, - ModuleLink, - ModuleIssue, - Label, -) -from plane.app.serializers import ( - ImporterSerializer, - IssueFlatSerializer, - ModuleSerializer, -) -from plane.utils.integrations.github import get_github_repo_details -from plane.utils.importers.jira import ( - jira_project_issue_summary, - is_allowed_hostname, -) -from plane.bgtasks.importer_task import service_importer -from plane.utils.html_processor import strip_tags -from plane.app.permissions import WorkSpaceAdminPermission - - -class ServiceIssueImportSummaryEndpoint(BaseAPIView): - def get(self, request, slug, service): - if service == "github": - owner = request.GET.get("owner", False) - repo = request.GET.get("repo", False) - - if not owner or not repo: - return Response( - {"error": "Owner and repo are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace_integration = 
WorkspaceIntegration.objects.get( - integration__provider="github", workspace__slug=slug - ) - - access_tokens_url = workspace_integration.metadata.get( - "access_tokens_url", False - ) - - if not access_tokens_url: - return Response( - { - "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - issue_count, labels, collaborators = get_github_repo_details( - access_tokens_url, owner, repo - ) - return Response( - { - "issue_count": issue_count, - "labels": labels, - "collaborators": collaborators, - }, - status=status.HTTP_200_OK, - ) - - if service == "jira": - # Check for all the keys - params = { - "project_key": "Project key is required", - "api_token": "API token is required", - "email": "Email is required", - "cloud_hostname": "Cloud hostname is required", - } - - for key, error_message in params.items(): - if not request.GET.get(key, False): - return Response( - {"error": error_message}, - status=status.HTTP_400_BAD_REQUEST, - ) - - project_key = request.GET.get("project_key", "") - api_token = request.GET.get("api_token", "") - email = request.GET.get("email", "") - cloud_hostname = request.GET.get("cloud_hostname", "") - - response = jira_project_issue_summary( - email, api_token, project_key, cloud_hostname - ) - if "error" in response: - return Response(response, status=status.HTTP_400_BAD_REQUEST) - else: - return Response( - response, - status=status.HTTP_200_OK, - ) - return Response( - {"error": "Service not supported yet"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - -class ImportServiceEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def post(self, request, slug, service): - project_id = request.data.get("project_id", False) - - if not project_id: - return Response( - {"error": "Project ID is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = 
Workspace.objects.get(slug=slug) - - if service == "github": - data = request.data.get("data", False) - metadata = request.data.get("metadata", False) - config = request.data.get("config", False) - if not data or not metadata or not config: - return Response( - {"error": "Data, config and metadata are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - api_token = APIToken.objects.filter( - user=request.user, workspace=workspace - ).first() - if api_token is None: - api_token = APIToken.objects.create( - user=request.user, - label="Importer", - workspace=workspace, - ) - - importer = Importer.objects.create( - service=service, - project_id=project_id, - status="queued", - initiated_by=request.user, - data=data, - metadata=metadata, - token=api_token, - config=config, - created_by=request.user, - updated_by=request.user, - ) - - service_importer.delay(service, importer.id) - serializer = ImporterSerializer(importer) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - if service == "jira": - data = request.data.get("data", False) - metadata = request.data.get("metadata", False) - config = request.data.get("config", False) - - cloud_hostname = metadata.get("cloud_hostname", False) - - if not cloud_hostname: - return Response( - {"error": "Cloud hostname is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if not is_allowed_hostname(cloud_hostname): - return Response( - {"error": "Hostname is not a valid hostname."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if not data or not metadata: - return Response( - {"error": "Data, config and metadata are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - api_token = APIToken.objects.filter( - user=request.user, workspace=workspace - ).first() - if api_token is None: - api_token = APIToken.objects.create( - user=request.user, - label="Importer", - workspace=workspace, - ) - - importer = Importer.objects.create( - service=service, - project_id=project_id, - status="queued", - 
initiated_by=request.user, - data=data, - metadata=metadata, - token=api_token, - config=config, - created_by=request.user, - updated_by=request.user, - ) - - service_importer.delay(service, importer.id) - serializer = ImporterSerializer(importer) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - return Response( - {"error": "Servivce not supported yet"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get(self, request, slug): - imports = ( - Importer.objects.filter(workspace__slug=slug) - .order_by("-created_at") - .select_related("initiated_by", "project", "workspace") - ) - serializer = ImporterSerializer(imports, many=True) - return Response(serializer.data) - - def delete(self, request, slug, service, pk): - importer = Importer.objects.get( - pk=pk, service=service, workspace__slug=slug - ) - - if importer.imported_data is not None: - # Delete all imported Issues - imported_issues = importer.imported_data.get("issues", []) - Issue.issue_objects.filter(id__in=imported_issues).delete() - - # Delete all imported Labels - imported_labels = importer.imported_data.get("labels", []) - Label.objects.filter(id__in=imported_labels).delete() - - if importer.service == "jira": - imported_modules = importer.imported_data.get("modules", []) - Module.objects.filter(id__in=imported_modules).delete() - importer.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - def patch(self, request, slug, service, pk): - importer = Importer.objects.get( - pk=pk, service=service, workspace__slug=slug - ) - serializer = ImporterSerializer( - importer, data=request.data, partial=True - ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - -class UpdateServiceImportStatusEndpoint(BaseAPIView): - def post(self, request, slug, project_id, service, importer_id): - importer = Importer.objects.get( - pk=importer_id, - 
workspace__slug=slug, - project_id=project_id, - service=service, - ) - importer.status = request.data.get("status", "processing") - importer.save() - return Response(status.HTTP_200_OK) - - -class BulkImportIssuesEndpoint(BaseAPIView): - def post(self, request, slug, project_id, service): - # Get the project - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - # Get the default state - default_state = State.objects.filter( - ~Q(name="Triage"), project_id=project_id, default=True - ).first() - # if there is no default state assign any random state - if default_state is None: - default_state = State.objects.filter( - ~Q(name="Triage"), project_id=project_id - ).first() - - # Get the maximum sequence_id - last_id = IssueSequence.objects.filter( - project_id=project_id - ).aggregate(largest=Max("sequence"))["largest"] - - last_id = 1 if last_id is None else last_id + 1 - - # Get the maximum sort order - largest_sort_order = Issue.objects.filter( - project_id=project_id, state=default_state - ).aggregate(largest=Max("sort_order"))["largest"] - - largest_sort_order = ( - 65535 if largest_sort_order is None else largest_sort_order + 10000 - ) - - # Get the issues_data - issues_data = request.data.get("issues_data", []) - - if not len(issues_data): - return Response( - {"error": "Issue data is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Issues - bulk_issues = [] - for issue_data in issues_data: - bulk_issues.append( - Issue( - project_id=project_id, - workspace_id=project.workspace_id, - state_id=issue_data.get("state") - if issue_data.get("state", False) - else default_state.id, - name=issue_data.get("name", "Issue Created through Bulk"), - description_html=issue_data.get( - "description_html", "

" - ), - description_stripped=( - None - if ( - issue_data.get("description_html") == "" - or issue_data.get("description_html") is None - ) - else strip_tags(issue_data.get("description_html")) - ), - sequence_id=last_id, - sort_order=largest_sort_order, - start_date=issue_data.get("start_date", None), - target_date=issue_data.get("target_date", None), - priority=issue_data.get("priority", "none"), - created_by=request.user, - ) - ) - - largest_sort_order = largest_sort_order + 10000 - last_id = last_id + 1 - - issues = Issue.objects.bulk_create( - bulk_issues, - batch_size=100, - ignore_conflicts=True, - ) - - # Sequences - _ = IssueSequence.objects.bulk_create( - [ - IssueSequence( - issue=issue, - sequence=issue.sequence_id, - project_id=project_id, - workspace_id=project.workspace_id, - ) - for issue in issues - ], - batch_size=100, - ) - - # Attach Labels - bulk_issue_labels = [] - for issue, issue_data in zip(issues, issues_data): - labels_list = issue_data.get("labels_list", []) - bulk_issue_labels = bulk_issue_labels + [ - IssueLabel( - issue=issue, - label_id=label_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for label_id in labels_list - ] - - _ = IssueLabel.objects.bulk_create( - bulk_issue_labels, batch_size=100, ignore_conflicts=True - ) - - # Attach Assignees - bulk_issue_assignees = [] - for issue, issue_data in zip(issues, issues_data): - assignees_list = issue_data.get("assignees_list", []) - bulk_issue_assignees = bulk_issue_assignees + [ - IssueAssignee( - issue=issue, - assignee_id=assignee_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for assignee_id in assignees_list - ] - - _ = IssueAssignee.objects.bulk_create( - bulk_issue_assignees, batch_size=100, ignore_conflicts=True - ) - - # Track the issue activities - IssueActivity.objects.bulk_create( - [ - IssueActivity( - issue=issue, - actor=request.user, - project_id=project_id, - 
workspace_id=project.workspace_id, - comment=f"imported the issue from {service}", - verb="created", - created_by=request.user, - ) - for issue in issues - ], - batch_size=100, - ) - - # Create Comments - bulk_issue_comments = [] - for issue, issue_data in zip(issues, issues_data): - comments_list = issue_data.get("comments_list", []) - bulk_issue_comments = bulk_issue_comments + [ - IssueComment( - issue=issue, - comment_html=comment.get("comment_html", "

"), - actor=request.user, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for comment in comments_list - ] - - _ = IssueComment.objects.bulk_create( - bulk_issue_comments, batch_size=100 - ) - - # Attach Links - _ = IssueLink.objects.bulk_create( - [ - IssueLink( - issue=issue, - url=issue_data.get("link", {}).get( - "url", "https://github.com" - ), - title=issue_data.get("link", {}).get( - "title", "Original Issue" - ), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for issue, issue_data in zip(issues, issues_data) - ] - ) - - return Response( - {"issues": IssueFlatSerializer(issues, many=True).data}, - status=status.HTTP_201_CREATED, - ) - - -class BulkImportModulesEndpoint(BaseAPIView): - def post(self, request, slug, project_id, service): - modules_data = request.data.get("modules_data", []) - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - modules = Module.objects.bulk_create( - [ - Module( - name=module.get("name", uuid.uuid4().hex), - description=module.get("description", ""), - start_date=module.get("start_date", None), - target_date=module.get("target_date", None), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for module in modules_data - ], - batch_size=100, - ignore_conflicts=True, - ) - - modules = Module.objects.filter( - id__in=[module.id for module in modules] - ) - - if len(modules) == len(modules_data): - _ = ModuleLink.objects.bulk_create( - [ - ModuleLink( - module=module, - url=module_data.get("link", {}).get( - "url", "https://plane.so" - ), - title=module_data.get("link", {}).get( - "title", "Original Issue" - ), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for module, module_data in zip(modules, modules_data) - ], - batch_size=100, - ignore_conflicts=True, - ) - - bulk_module_issues = [] - for module, module_data in zip(modules, 
modules_data): - module_issues_list = module_data.get("module_issues_list", []) - bulk_module_issues = bulk_module_issues + [ - ModuleIssue( - issue_id=issue, - module=module, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - ) - for issue in module_issues_list - ] - - _ = ModuleIssue.objects.bulk_create( - bulk_module_issues, batch_size=100, ignore_conflicts=True - ) - - serializer = ModuleSerializer(modules, many=True) - return Response( - {"modules": serializer.data}, status=status.HTTP_201_CREATED - ) - - else: - return Response( - { - "message": "Modules created but issues could not be imported" - }, - status=status.HTTP_200_OK, - ) diff --git a/apiserver/plane/app/views/inbox.py b/apiserver/plane/app/views/inbox/base.py similarity index 97% rename from apiserver/plane/app/views/inbox.py rename to apiserver/plane/app/views/inbox/base.py index ed32a14febf..710aa10a227 100644 --- a/apiserver/plane/app/views/inbox.py +++ b/apiserver/plane/app/views/inbox/base.py @@ -15,7 +15,7 @@ from rest_framework.response import Response # Module imports -from .base import BaseViewSet +from ..base import BaseViewSet from plane.app.permissions import ProjectBasePermission, ProjectLitePermission from plane.db.models import ( Inbox, @@ -146,7 +146,8 @@ def get_queryset(self): ArrayAgg( "assignees__id", distinct=True, - filter=~Q(assignees__id__isnull=True), + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), ), Value([], output_field=ArrayField(UUIDField())), ), @@ -213,7 +214,7 @@ def create(self, request, slug, project_id, inbox_id): ) # Check for valid priority - if not request.data.get("issue", {}).get("priority", "none") in [ + if request.data.get("issue", {}).get("priority", "none") not in [ "low", "medium", "high", @@ -428,8 +429,11 @@ def retrieve(self, request, slug, project_id, inbox_id, issue_id): ) ).first() if issue is None: - return Response({"error": "Requested object was not found"}, 
status=status.HTTP_404_NOT_FOUND) - + return Response( + {"error": "Requested object was not found"}, + status=status.HTTP_404_NOT_FOUND, + ) + serializer = IssueDetailSerializer(issue) return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/integration/__init__.py b/apiserver/plane/app/views/integration/__init__.py deleted file mode 100644 index ea20d96eafd..00000000000 --- a/apiserver/plane/app/views/integration/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .base import IntegrationViewSet, WorkspaceIntegrationViewSet -from .github import ( - GithubRepositorySyncViewSet, - GithubIssueSyncViewSet, - BulkCreateGithubIssueSyncEndpoint, - GithubCommentSyncViewSet, - GithubRepositoriesEndpoint, -) -from .slack import SlackProjectSyncViewSet diff --git a/apiserver/plane/app/views/integration/base.py b/apiserver/plane/app/views/integration/base.py deleted file mode 100644 index d757fe47126..00000000000 --- a/apiserver/plane/app/views/integration/base.py +++ /dev/null @@ -1,181 +0,0 @@ -# Python improts -import uuid -import requests - -# Django imports -from django.contrib.auth.hashers import make_password - -# Third party imports -from rest_framework.response import Response -from rest_framework import status -from sentry_sdk import capture_exception - -# Module imports -from plane.app.views import BaseViewSet -from plane.db.models import ( - Integration, - WorkspaceIntegration, - Workspace, - User, - WorkspaceMember, - APIToken, -) -from plane.app.serializers import ( - IntegrationSerializer, - WorkspaceIntegrationSerializer, -) -from plane.utils.integrations.github import ( - get_github_metadata, - delete_github_installation, -) -from plane.app.permissions import WorkSpaceAdminPermission -from plane.utils.integrations.slack import slack_oauth - - -class IntegrationViewSet(BaseViewSet): - serializer_class = IntegrationSerializer - model = Integration - - def create(self, request): - serializer = 
IntegrationSerializer(data=request.data) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def partial_update(self, request, pk): - integration = Integration.objects.get(pk=pk) - if integration.verified: - return Response( - {"error": "Verified integrations cannot be updated"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = IntegrationSerializer( - integration, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, pk): - integration = Integration.objects.get(pk=pk) - if integration.verified: - return Response( - {"error": "Verified integrations cannot be updated"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - integration.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class WorkspaceIntegrationViewSet(BaseViewSet): - serializer_class = WorkspaceIntegrationSerializer - model = WorkspaceIntegration - - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("integration") - ) - - def create(self, request, slug, provider): - workspace = Workspace.objects.get(slug=slug) - integration = Integration.objects.get(provider=provider) - config = {} - if provider == "github": - installation_id = request.data.get("installation_id", None) - if not installation_id: - return Response( - {"error": "Installation ID is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - metadata = get_github_metadata(installation_id) - config = {"installation_id": installation_id} - - if provider == "slack": - code = request.data.get("code", False) - - if not code: - return Response( - {"error": 
"Code is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - slack_response = slack_oauth(code=code) - - metadata = slack_response - access_token = metadata.get("access_token", False) - team_id = metadata.get("team", {}).get("id", False) - if not metadata or not access_token or not team_id: - return Response( - { - "error": "Slack could not be installed. Please try again later" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - config = {"team_id": team_id, "access_token": access_token} - - # Create a bot user - bot_user = User.objects.create( - email=f"{uuid.uuid4().hex}@plane.so", - username=uuid.uuid4().hex, - password=make_password(uuid.uuid4().hex), - is_password_autoset=True, - is_bot=True, - first_name=integration.title, - avatar=integration.avatar_url - if integration.avatar_url is not None - else "", - ) - - # Create an API Token for the bot user - api_token = APIToken.objects.create( - user=bot_user, - user_type=1, # bot user - workspace=workspace, - ) - - workspace_integration = WorkspaceIntegration.objects.create( - workspace=workspace, - integration=integration, - actor=bot_user, - api_token=api_token, - metadata=metadata, - config=config, - ) - - # Add bot user as a member of workspace - _ = WorkspaceMember.objects.create( - workspace=workspace_integration.workspace, - member=bot_user, - role=20, - ) - return Response( - WorkspaceIntegrationSerializer(workspace_integration).data, - status=status.HTTP_201_CREATED, - ) - - def destroy(self, request, slug, pk): - workspace_integration = WorkspaceIntegration.objects.get( - pk=pk, workspace__slug=slug - ) - - if workspace_integration.integration.provider == "github": - installation_id = workspace_integration.config.get( - "installation_id", False - ) - if installation_id: - delete_github_installation(installation_id=installation_id) - - workspace_integration.delete() - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/integration/github.py 
b/apiserver/plane/app/views/integration/github.py deleted file mode 100644 index 2d37c64b078..00000000000 --- a/apiserver/plane/app/views/integration/github.py +++ /dev/null @@ -1,202 +0,0 @@ -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception - -# Module imports -from plane.app.views import BaseViewSet, BaseAPIView -from plane.db.models import ( - GithubIssueSync, - GithubRepositorySync, - GithubRepository, - WorkspaceIntegration, - ProjectMember, - Label, - GithubCommentSync, - Project, -) -from plane.app.serializers import ( - GithubIssueSyncSerializer, - GithubRepositorySyncSerializer, - GithubCommentSyncSerializer, -) -from plane.utils.integrations.github import get_github_repos -from plane.app.permissions import ( - ProjectBasePermission, - ProjectEntityPermission, -) - - -class GithubRepositoriesEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def get(self, request, slug, workspace_integration_id): - page = request.GET.get("page", 1) - workspace_integration = WorkspaceIntegration.objects.get( - workspace__slug=slug, pk=workspace_integration_id - ) - - if workspace_integration.integration.provider != "github": - return Response( - {"error": "Not a github integration"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - access_tokens_url = workspace_integration.metadata["access_tokens_url"] - repositories_url = ( - workspace_integration.metadata["repositories_url"] - + f"?per_page=100&page={page}" - ) - repositories = get_github_repos(access_tokens_url, repositories_url) - return Response(repositories, status=status.HTTP_200_OK) - - -class GithubRepositorySyncViewSet(BaseViewSet): - permission_classes = [ - ProjectBasePermission, - ] - - serializer_class = GithubRepositorySyncSerializer - model = GithubRepositorySync - - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - - def 
get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - ) - - def create(self, request, slug, project_id, workspace_integration_id): - name = request.data.get("name", False) - url = request.data.get("url", False) - config = request.data.get("config", {}) - repository_id = request.data.get("repository_id", False) - owner = request.data.get("owner", False) - - if not name or not url or not repository_id or not owner: - return Response( - {"error": "Name, url, repository_id and owner are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get the workspace integration - workspace_integration = WorkspaceIntegration.objects.get( - pk=workspace_integration_id - ) - - # Delete the old repository object - GithubRepositorySync.objects.filter( - project_id=project_id, workspace__slug=slug - ).delete() - GithubRepository.objects.filter( - project_id=project_id, workspace__slug=slug - ).delete() - - # Create repository - repo = GithubRepository.objects.create( - name=name, - url=url, - config=config, - repository_id=repository_id, - owner=owner, - project_id=project_id, - ) - - # Create a Label for github - label = Label.objects.filter( - name="GitHub", - project_id=project_id, - ).first() - - if label is None: - label = Label.objects.create( - name="GitHub", - project_id=project_id, - description="Label to sync Plane issues with GitHub issues", - color="#003773", - ) - - # Create repo sync - repo_sync = GithubRepositorySync.objects.create( - repository=repo, - workspace_integration=workspace_integration, - actor=workspace_integration.actor, - credentials=request.data.get("credentials", {}), - project_id=project_id, - label=label, - ) - - # Add bot as a member in the project - _ = ProjectMember.objects.get_or_create( - member=workspace_integration.actor, role=20, project_id=project_id - ) - - # Return Response - return Response( - 
GithubRepositorySyncSerializer(repo_sync).data, - status=status.HTTP_201_CREATED, - ) - - -class GithubIssueSyncViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - - serializer_class = GithubIssueSyncSerializer - model = GithubIssueSync - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - repository_sync_id=self.kwargs.get("repo_sync_id"), - ) - - -class BulkCreateGithubIssueSyncEndpoint(BaseAPIView): - def post(self, request, slug, project_id, repo_sync_id): - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - github_issue_syncs = request.data.get("github_issue_syncs", []) - github_issue_syncs = GithubIssueSync.objects.bulk_create( - [ - GithubIssueSync( - issue_id=github_issue_sync.get("issue"), - repo_issue_id=github_issue_sync.get("repo_issue_id"), - issue_url=github_issue_sync.get("issue_url"), - github_issue_id=github_issue_sync.get("github_issue_id"), - repository_sync_id=repo_sync_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for github_issue_sync in github_issue_syncs - ], - batch_size=100, - ignore_conflicts=True, - ) - - serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - -class GithubCommentSyncViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - - serializer_class = GithubCommentSyncSerializer - model = GithubCommentSync - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_sync_id=self.kwargs.get("issue_sync_id"), - ) diff --git a/apiserver/plane/app/views/integration/slack.py b/apiserver/plane/app/views/integration/slack.py deleted file mode 100644 index c22ee3e52bd..00000000000 --- a/apiserver/plane/app/views/integration/slack.py +++ /dev/null @@ -1,96 +0,0 @@ -# Django import -from django.db import 
IntegrityError - -# Third party imports -from rest_framework import status -from rest_framework.response import Response -from sentry_sdk import capture_exception - -# Module imports -from plane.app.views import BaseViewSet, BaseAPIView -from plane.db.models import ( - SlackProjectSync, - WorkspaceIntegration, - ProjectMember, -) -from plane.app.serializers import SlackProjectSyncSerializer -from plane.app.permissions import ( - ProjectBasePermission, - ProjectEntityPermission, -) -from plane.utils.integrations.slack import slack_oauth - - -class SlackProjectSyncViewSet(BaseViewSet): - permission_classes = [ - ProjectBasePermission, - ] - serializer_class = SlackProjectSyncSerializer - model = SlackProjectSync - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - ) - - def create(self, request, slug, project_id, workspace_integration_id): - try: - code = request.data.get("code", False) - - if not code: - return Response( - {"error": "Code is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - slack_response = slack_oauth(code=code) - - workspace_integration = WorkspaceIntegration.objects.get( - workspace__slug=slug, pk=workspace_integration_id - ) - - workspace_integration = WorkspaceIntegration.objects.get( - pk=workspace_integration_id, workspace__slug=slug - ) - slack_project_sync = SlackProjectSync.objects.create( - access_token=slack_response.get("access_token"), - scopes=slack_response.get("scope"), - bot_user_id=slack_response.get("bot_user_id"), - webhook_url=slack_response.get("incoming_webhook", {}).get( - "url" - ), - data=slack_response, - team_id=slack_response.get("team", {}).get("id"), - team_name=slack_response.get("team", {}).get("name"), - workspace_integration=workspace_integration, - 
project_id=project_id, - ) - _ = ProjectMember.objects.get_or_create( - member=workspace_integration.actor, - role=20, - project_id=project_id, - ) - serializer = SlackProjectSyncSerializer(slack_project_sync) - return Response(serializer.data, status=status.HTTP_200_OK) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"error": "Slack is already installed for the project"}, - status=status.HTTP_410_GONE, - ) - capture_exception(e) - return Response( - { - "error": "Slack could not be installed. Please try again later" - }, - status=status.HTTP_400_BAD_REQUEST, - ) diff --git a/apiserver/plane/app/views/issue.py b/apiserver/plane/app/views/issue.py deleted file mode 100644 index 14e0b6a9aa9..00000000000 --- a/apiserver/plane/app/views/issue.py +++ /dev/null @@ -1,2450 +0,0 @@ -# Python imports -import json -import random -from itertools import chain - -# Django imports -from django.utils import timezone -from django.db.models import ( - Prefetch, - OuterRef, - Func, - F, - Q, - Case, - Value, - CharField, - When, - Exists, - Max, -) -from django.core.serializers.json import DjangoJSONEncoder -from django.utils.decorators import method_decorator -from django.views.decorators.gzip import gzip_page -from django.db import IntegrityError -from django.contrib.postgres.aggregates import ArrayAgg -from django.contrib.postgres.fields import ArrayField -from django.db.models import Value, UUIDField -from django.db.models.functions import Coalesce - -# Third Party imports -from rest_framework.response import Response -from rest_framework import status -from rest_framework.parsers import MultiPartParser, FormParser - -# Module imports -from . 
import BaseViewSet, BaseAPIView, WebhookMixin -from plane.app.serializers import ( - IssueActivitySerializer, - IssueCommentSerializer, - IssuePropertySerializer, - IssueSerializer, - IssueCreateSerializer, - LabelSerializer, - IssueFlatSerializer, - IssueLinkSerializer, - IssueLiteSerializer, - IssueAttachmentSerializer, - IssueSubscriberSerializer, - ProjectMemberLiteSerializer, - IssueReactionSerializer, - CommentReactionSerializer, - IssueRelationSerializer, - RelatedIssueSerializer, - IssueDetailSerializer, -) -from plane.app.permissions import ( - ProjectEntityPermission, - WorkSpaceAdminPermission, - ProjectMemberPermission, - ProjectLitePermission, -) -from plane.db.models import ( - Project, - Issue, - IssueActivity, - IssueComment, - IssueProperty, - Label, - IssueLink, - IssueAttachment, - IssueSubscriber, - ProjectMember, - IssueReaction, - CommentReaction, - IssueRelation, -) -from plane.bgtasks.issue_activites_task import issue_activity -from plane.utils.grouper import group_results -from plane.utils.issue_filters import issue_filters -from collections import defaultdict - - -class IssueListEndpoint(BaseAPIView): - - permission_classes = [ - ProjectEntityPermission, - ] - - def get(self, request, slug, project_id): - issue_ids = request.GET.get("issues", False) - - if not issue_ids: - return Response( - {"error": "Issues are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - issue_ids = [issue_id for issue_id in issue_ids.split(",") if issue_id != ""] - - queryset = ( - Issue.issue_objects.filter( - workspace__slug=slug, project_id=project_id, pk__in=issue_ids - ) - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - 
) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=~Q(issue_module__module_id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - ) - ).distinct() - - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", - ] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = queryset.filter(**filters) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - 
state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" - if order_by_param.startswith("-") - else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - if self.fields or self.expand: - issues = IssueSerializer( - queryset, many=True, fields=self.fields, expand=self.expand - ).data - else: - issues = issue_queryset.values( - "id", - "name", - "state_id", - "sort_order", - "completed_at", - "estimate_point", - "priority", - "start_date", - "target_date", - "sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", - "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", - "archived_at", - ) - return Response(issues, status=status.HTTP_200_OK) - - -class IssueViewSet(WebhookMixin, BaseViewSet): - def get_serializer_class(self): - return ( - IssueCreateSerializer - if self.action in ["create", "update", "partial_update"] - else IssueSerializer - ) - - model = Issue - webhook_event = "issue" - permission_classes = [ - ProjectEntityPermission, - ] - - search_fields = [ - "name", - ] - - filterset_fields = [ - "state__name", - "assignees__id", - "workspace__id", - ] - - def get_queryset(self): - return ( - Issue.issue_objects.filter( - project_id=self.kwargs.get("project_id") - ) - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", 
"labels", "issue_module__module") - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=~Q(issue_module__module_id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - ) - ).distinct() - - @method_decorator(gzip_page) - def list(self, request, slug, project_id): - filters = issue_filters(request.query_params, "GET") - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = self.get_queryset().filter(**filters) - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", - ] - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - 
).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" - if order_by_param.startswith("-") - else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - # Only use serializer when expand or fields else return by values - if self.expand or self.fields: - issues = IssueSerializer( - issue_queryset, - many=True, - fields=self.fields, - expand=self.expand, - ).data - else: - issues = issue_queryset.values( - "id", - "name", - "state_id", - "sort_order", - "completed_at", - "estimate_point", - "priority", - "start_date", - "target_date", - "sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", - "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", - "archived_at", - ) - return Response(issues, status=status.HTTP_200_OK) - - def create(self, request, slug, project_id): - project = Project.objects.get(pk=project_id) - - serializer = IssueCreateSerializer( - data=request.data, - context={ - "project_id": project_id, - "workspace_id": project.workspace_id, - "default_assignee_id": project.default_assignee_id, - 
}, - ) - - if serializer.is_valid(): - serializer.save() - - # Track the issue - issue_activity.delay( - type="issue.activity.created", - requested_data=json.dumps( - self.request.data, cls=DjangoJSONEncoder - ), - actor_id=str(request.user.id), - issue_id=str(serializer.data.get("id", None)), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - issue = ( - self.get_queryset() - .filter(pk=serializer.data["id"]) - .values( - "id", - "name", - "state_id", - "sort_order", - "completed_at", - "estimate_point", - "priority", - "start_date", - "target_date", - "sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", - "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", - "archived_at", - ) - .first() - ) - return Response(issue, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def retrieve(self, request, slug, project_id, pk=None): - issue = ( - self.get_queryset() - .filter(pk=pk) - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related( - "issue", "actor" - ), - ) - ) - .prefetch_related( - Prefetch( - "issue_attachment", - queryset=IssueAttachment.objects.select_related("issue"), - ) - ) - .prefetch_related( - Prefetch( - "issue_link", - queryset=IssueLink.objects.select_related("created_by"), - ) - ) - .annotate( - is_subscribed=Exists( - IssueSubscriber.objects.filter( - workspace__slug=slug, - project_id=project_id, - issue_id=OuterRef("pk"), - subscriber=request.user, - ) - ) - ) - ).first() - if not issue: - return Response( - {"error": "The required object does not exist."}, - status=status.HTTP_404_NOT_FOUND, - ) - - serializer = IssueDetailSerializer(issue, expand=self.expand) - return Response(serializer.data, 
status=status.HTTP_200_OK) - - def partial_update(self, request, slug, project_id, pk=None): - issue = self.get_queryset().filter(pk=pk).first() - - if not issue: - return Response( - {"error": "Issue not found"}, - status=status.HTTP_404_NOT_FOUND, - ) - - current_instance = json.dumps( - IssueSerializer(issue).data, cls=DjangoJSONEncoder - ) - - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - serializer = IssueCreateSerializer( - issue, data=request.data, partial=True - ) - if serializer.is_valid(): - serializer.save() - issue_activity.delay( - type="issue.activity.updated", - requested_data=requested_data, - actor_id=str(request.user.id), - issue_id=str(pk), - project_id=str(project_id), - current_instance=current_instance, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - issue = self.get_queryset().filter(pk=pk).first() - return Response(status=status.HTTP_204_NO_CONTENT) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, project_id, pk=None): - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - issue.delete() - issue_activity.delay( - type="issue.activity.deleted", - requested_data=json.dumps({"issue_id": str(pk)}), - actor_id=str(request.user.id), - issue_id=str(pk), - project_id=str(project_id), - current_instance={}, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(status=status.HTTP_204_NO_CONTENT) - - -# TODO: deprecated remove once confirmed -class UserWorkSpaceIssues(BaseAPIView): - @method_decorator(gzip_page) - def get(self, request, slug): - filters = issue_filters(request.query_params, "GET") - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", - ] - - 
order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = ( - Issue.issue_objects.filter( - ( - Q(assignees__in=[request.user]) - | Q(created_by=request.user) - | Q(issue_subscribers__subscriber=request.user) - ), - workspace__slug=slug, - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .select_related("project") - .select_related("workspace") - .select_related("state") - .select_related("parent") - .prefetch_related("assignees") - .prefetch_related("labels") - .order_by(order_by_param) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(**filters) - ).distinct() - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - 
"labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" - if order_by_param.startswith("-") - else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueLiteSerializer(issue_queryset, many=True).data - - ## Grouping the results - group_by = request.GET.get("group_by", False) - sub_group_by = request.GET.get("sub_group_by", False) - if sub_group_by and sub_group_by == group_by: - return Response( - {"error": "Group by and sub group by cannot be same"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if group_by: - grouped_results = group_results(issues, group_by, sub_group_by) - return Response( - grouped_results, - status=status.HTTP_200_OK, - ) - - return Response(issues, status=status.HTTP_200_OK) - - -# TODO: deprecated remove once confirmed -class WorkSpaceIssuesEndpoint(BaseAPIView): - permission_classes = [ - WorkSpaceAdminPermission, - ] - - @method_decorator(gzip_page) - def get(self, request, slug): - issues = ( - Issue.issue_objects.filter(workspace__slug=slug) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .order_by("-created_at") - ) - serializer = IssueSerializer(issues, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class IssueActivityEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - @method_decorator(gzip_page) - def get(self, request, slug, project_id, issue_id): - filters = {} - if request.GET.get("created_at__gt", None) is not None: - filters = {"created_at__gt": request.GET.get("created_at__gt")} - - issue_activities = ( - IssueActivity.objects.filter(issue_id=issue_id) - .filter( - ~Q(field__in=["comment", "vote", "reaction", "draft"]), - 
project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - workspace__slug=slug, - ) - .filter(**filters) - .select_related("actor", "workspace", "issue", "project") - ).order_by("created_at") - issue_comments = ( - IssueComment.objects.filter(issue_id=issue_id) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - workspace__slug=slug, - ) - .filter(**filters) - .order_by("created_at") - .select_related("actor", "issue", "project", "workspace") - .prefetch_related( - Prefetch( - "comment_reactions", - queryset=CommentReaction.objects.select_related("actor"), - ) - ) - ) - issue_activities = IssueActivitySerializer( - issue_activities, many=True - ).data - issue_comments = IssueCommentSerializer(issue_comments, many=True).data - - if request.GET.get("activity_type", None) == "issue-property": - return Response(issue_activities, status=status.HTTP_200_OK) - - if request.GET.get("activity_type", None) == "issue-comment": - return Response(issue_comments, status=status.HTTP_200_OK) - - result_list = sorted( - chain(issue_activities, issue_comments), - key=lambda instance: instance["created_at"], - ) - - return Response(result_list, status=status.HTTP_200_OK) - - -class IssueCommentViewSet(WebhookMixin, BaseViewSet): - serializer_class = IssueCommentSerializer - model = IssueComment - webhook_event = "issue_comment" - permission_classes = [ - ProjectLitePermission, - ] - - filterset_fields = [ - "issue__id", - "workspace__id", - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .select_related("project") - .select_related("workspace") - 
.select_related("issue") - .annotate( - is_member=Exists( - ProjectMember.objects.filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - member_id=self.request.user.id, - is_active=True, - ) - ) - ) - .distinct() - ) - - def create(self, request, slug, project_id, issue_id): - serializer = IssueCommentSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, - issue_id=issue_id, - actor=request.user, - ) - issue_activity.delay( - type="comment.activity.created", - requested_data=json.dumps( - serializer.data, cls=DjangoJSONEncoder - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def partial_update(self, request, slug, project_id, issue_id, pk): - issue_comment = IssueComment.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - pk=pk, - ) - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = json.dumps( - IssueCommentSerializer(issue_comment).data, - cls=DjangoJSONEncoder, - ) - serializer = IssueCommentSerializer( - issue_comment, data=request.data, partial=True - ) - if serializer.is_valid(): - serializer.save() - issue_activity.delay( - type="comment.activity.updated", - requested_data=requested_data, - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=current_instance, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, project_id, issue_id, pk): - issue_comment = IssueComment.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - pk=pk, - ) - current_instance = json.dumps( - IssueCommentSerializer(issue_comment).data, - cls=DjangoJSONEncoder, - ) - issue_comment.delete() - issue_activity.delay( - type="comment.activity.deleted", - requested_data=json.dumps({"comment_id": str(pk)}), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=current_instance, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(status=status.HTTP_204_NO_CONTENT) - - -class IssueUserDisplayPropertyEndpoint(BaseAPIView): - permission_classes = [ - ProjectLitePermission, - ] - - def patch(self, request, slug, project_id): - issue_property = IssueProperty.objects.get( - user=request.user, - project_id=project_id, - ) - - issue_property.filters = request.data.get( - "filters", issue_property.filters - ) - issue_property.display_filters = request.data.get( - "display_filters", issue_property.display_filters - ) - issue_property.display_properties = request.data.get( - "display_properties", issue_property.display_properties - ) - issue_property.save() - serializer = IssuePropertySerializer(issue_property) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - def get(self, request, slug, project_id): - issue_property, _ = IssueProperty.objects.get_or_create( - user=request.user, project_id=project_id - ) - serializer = IssuePropertySerializer(issue_property) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class LabelViewSet(BaseViewSet): - serializer_class = LabelSerializer - model = Label - permission_classes = [ - ProjectMemberPermission, - ] - - def create(self, request, slug, project_id): - try: - serializer = LabelSerializer(data=request.data) - 
if serializer.is_valid(): - serializer.save(project_id=project_id) - return Response( - serializer.data, status=status.HTTP_201_CREATED - ) - return Response( - serializer.errors, status=status.HTTP_400_BAD_REQUEST - ) - except IntegrityError: - return Response( - { - "error": "Label with the same name already exists in the project" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .select_related("project") - .select_related("workspace") - .select_related("parent") - .distinct() - .order_by("sort_order") - ) - - -class BulkDeleteIssuesEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - def delete(self, request, slug, project_id): - issue_ids = request.data.get("issue_ids", []) - - if not len(issue_ids): - return Response( - {"error": "Issue IDs are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - issues = Issue.issue_objects.filter( - workspace__slug=slug, project_id=project_id, pk__in=issue_ids - ) - - total_issues = len(issues) - - issues.delete() - - return Response( - {"message": f"{total_issues} issues were deleted"}, - status=status.HTTP_200_OK, - ) - - -class SubIssuesEndpoint(BaseAPIView): - permission_classes = [ - ProjectEntityPermission, - ] - - @method_decorator(gzip_page) - def get(self, request, slug, project_id, issue_id): - sub_issues = ( - Issue.issue_objects.filter( - parent_id=issue_id, workspace__slug=slug - ) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - 
.annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=~Q(issue_module__module_id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - ) - .annotate(state_group=F("state__group")) - ) - - # create's a dict with state group name with their respective issue id's - result = defaultdict(list) - for sub_issue in sub_issues: - result[sub_issue.state_group].append(str(sub_issue.id)) - - sub_issues = sub_issues.values( - "id", - "name", - "state_id", - "sort_order", - "completed_at", - "estimate_point", - "priority", - "start_date", - "target_date", - "sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", - "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", - "archived_at", - ) - return Response( - { - "sub_issues": sub_issues, - "state_distribution": result, - }, - status=status.HTTP_200_OK, - ) - - # Assign multiple sub issues - def post(self, request, slug, project_id, issue_id): - parent_issue = Issue.issue_objects.get(pk=issue_id) - sub_issue_ids = request.data.get("sub_issue_ids", []) - - if not 
len(sub_issue_ids): - return Response( - {"error": "Sub Issue IDs are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) - - for sub_issue in sub_issues: - sub_issue.parent = parent_issue - - _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10) - - updated_sub_issues = Issue.issue_objects.filter( - id__in=sub_issue_ids - ).annotate(state_group=F("state__group")) - - # Track the issue - _ = [ - issue_activity.delay( - type="issue.activity.updated", - requested_data=json.dumps({"parent": str(issue_id)}), - actor_id=str(request.user.id), - issue_id=str(sub_issue_id), - project_id=str(project_id), - current_instance=json.dumps({"parent": str(sub_issue_id)}), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - for sub_issue_id in sub_issue_ids - ] - - # create's a dict with state group name with their respective issue id's - result = defaultdict(list) - for sub_issue in updated_sub_issues: - result[sub_issue.state_group].append(str(sub_issue.id)) - - serializer = IssueSerializer( - updated_sub_issues, - many=True, - ) - return Response( - { - "sub_issues": serializer.data, - "state_distribution": result, - }, - status=status.HTTP_200_OK, - ) - - -class IssueLinkViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - - model = IssueLink - serializer_class = IssueLinkSerializer - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .order_by("-created_at") - .distinct() - ) - - def create(self, request, slug, project_id, issue_id): - serializer = IssueLinkSerializer(data=request.data) - if serializer.is_valid(): - 
serializer.save( - project_id=project_id, - issue_id=issue_id, - ) - issue_activity.delay( - type="link.activity.created", - requested_data=json.dumps( - serializer.data, cls=DjangoJSONEncoder - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def partial_update(self, request, slug, project_id, issue_id, pk): - issue_link = IssueLink.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - pk=pk, - ) - requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) - current_instance = json.dumps( - IssueLinkSerializer(issue_link).data, - cls=DjangoJSONEncoder, - ) - serializer = IssueLinkSerializer( - issue_link, data=request.data, partial=True - ) - if serializer.is_valid(): - serializer.save() - issue_activity.delay( - type="link.activity.updated", - requested_data=requested_data, - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=current_instance, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, project_id, issue_id, pk): - issue_link = IssueLink.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - pk=pk, - ) - current_instance = json.dumps( - IssueLinkSerializer(issue_link).data, - cls=DjangoJSONEncoder, - ) - issue_activity.delay( - type="link.activity.deleted", - requested_data=json.dumps({"link_id": str(pk)}), - actor_id=str(request.user.id), - 
issue_id=str(issue_id), - project_id=str(project_id), - current_instance=current_instance, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - issue_link.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class BulkCreateIssueLabelsEndpoint(BaseAPIView): - def post(self, request, slug, project_id): - label_data = request.data.get("label_data", []) - project = Project.objects.get(pk=project_id) - - labels = Label.objects.bulk_create( - [ - Label( - name=label.get("name", "Migrated"), - description=label.get("description", "Migrated Issue"), - color="#" + "%06x" % random.randint(0, 0xFFFFFF), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for label in label_data - ], - batch_size=50, - ignore_conflicts=True, - ) - - return Response( - {"labels": LabelSerializer(labels, many=True).data}, - status=status.HTTP_201_CREATED, - ) - - -class IssueAttachmentEndpoint(BaseAPIView): - serializer_class = IssueAttachmentSerializer - permission_classes = [ - ProjectEntityPermission, - ] - model = IssueAttachment - parser_classes = (MultiPartParser, FormParser) - - def post(self, request, slug, project_id, issue_id): - serializer = IssueAttachmentSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(project_id=project_id, issue_id=issue_id) - issue_activity.delay( - type="attachment.activity.created", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - serializer.data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def delete(self, 
request, slug, project_id, issue_id, pk): - issue_attachment = IssueAttachment.objects.get(pk=pk) - issue_attachment.asset.delete(save=False) - issue_attachment.delete() - issue_activity.delay( - type="attachment.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - - return Response(status=status.HTTP_204_NO_CONTENT) - - def get(self, request, slug, project_id, issue_id): - issue_attachments = IssueAttachment.objects.filter( - issue_id=issue_id, workspace__slug=slug, project_id=project_id - ) - serializer = IssueAttachmentSerializer(issue_attachments, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class IssueArchiveViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - serializer_class = IssueFlatSerializer - model = Issue - - def get_queryset(self): - return ( - Issue.objects.annotate( - sub_issues_count=Issue.objects.filter(parent=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .filter(archived_at__isnull=False) - .filter(project_id=self.kwargs.get("project_id")) - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter( - 
parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=~Q(issue_module__module_id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - ) - ) - - @method_decorator(gzip_page) - def list(self, request, slug, project_id): - filters = issue_filters(request.query_params, "GET") - show_sub_issues = request.GET.get("show_sub_issues", "true") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", - ] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = self.get_queryset().filter(**filters) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in 
enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" - if order_by_param.startswith("-") - else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issue_queryset = ( - issue_queryset - if show_sub_issues == "true" - else issue_queryset.filter(parent__isnull=True) - ) - if self.expand or self.fields: - issues = IssueSerializer( - issue_queryset, - many=True, - fields=self.fields, - ).data - else: - issues = issue_queryset.values( - "id", - "name", - "state_id", - "sort_order", - "completed_at", - "estimate_point", - "priority", - "start_date", - "target_date", - "sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", - "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", - "archived_at", - ) - return Response(issues, status=status.HTTP_200_OK) - - def retrieve(self, request, slug, project_id, pk=None): - issue = ( - self.get_queryset() - .filter(pk=pk) - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related( - "issue", "actor" - ), - ) - ) - .prefetch_related( - Prefetch( - "issue_attachment", - queryset=IssueAttachment.objects.select_related("issue"), - ) - ) - .prefetch_related( - Prefetch( - "issue_link", - queryset=IssueLink.objects.select_related("created_by"), - ) - ) - .annotate( - is_subscribed=Exists( - IssueSubscriber.objects.filter( - workspace__slug=slug, - project_id=project_id, - issue_id=OuterRef("pk"), - subscriber=request.user, - ) - ) - ) 
- ).first() - if not issue: - return Response( - {"error": "The required object does not exist."}, - status=status.HTTP_404_NOT_FOUND, - ) - serializer = IssueDetailSerializer(issue, expand=self.expand) - return Response(serializer.data, status=status.HTTP_200_OK) - - def archive(self, request, slug, project_id, pk=None): - issue = Issue.issue_objects.get( - workspace__slug=slug, - project_id=project_id, - pk=pk, - ) - if issue.state.group not in ["completed", "cancelled"]: - return Response( - {"error": "Can only archive completed or cancelled state group issue"}, - status=status.HTTP_400_BAD_REQUEST, - ) - issue_activity.delay( - type="issue.activity.updated", - requested_data=json.dumps({"archived_at": str(timezone.now().date()), "automation": False}), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=json.dumps( - IssueSerializer(issue).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - issue.archived_at = timezone.now().date() - issue.save() - - return Response({"archived_at": str(issue.archived_at)}, status=status.HTTP_200_OK) - - - def unarchive(self, request, slug, project_id, pk=None): - issue = Issue.objects.get( - workspace__slug=slug, - project_id=project_id, - archived_at__isnull=False, - pk=pk, - ) - issue_activity.delay( - type="issue.activity.updated", - requested_data=json.dumps({"archived_at": None}), - actor_id=str(request.user.id), - issue_id=str(issue.id), - project_id=str(project_id), - current_instance=json.dumps( - IssueSerializer(issue).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - issue.archived_at = None - issue.save() - - return Response(status=status.HTTP_204_NO_CONTENT) - - -class IssueSubscriberViewSet(BaseViewSet): - serializer_class = IssueSubscriberSerializer - model = IssueSubscriber - - 
permission_classes = [ - ProjectEntityPermission, - ] - - def get_permissions(self): - if self.action in ["subscribe", "unsubscribe", "subscription_status"]: - self.permission_classes = [ - ProjectLitePermission, - ] - else: - self.permission_classes = [ - ProjectEntityPermission, - ] - - return super(IssueSubscriberViewSet, self).get_permissions() - - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - ) - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .order_by("-created_at") - .distinct() - ) - - def list(self, request, slug, project_id, issue_id): - members = ProjectMember.objects.filter( - workspace__slug=slug, - project_id=project_id, - is_active=True, - ).select_related("member") - serializer = ProjectMemberLiteSerializer(members, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - - def destroy(self, request, slug, project_id, issue_id, subscriber_id): - issue_subscriber = IssueSubscriber.objects.get( - project=project_id, - subscriber=subscriber_id, - workspace__slug=slug, - issue=issue_id, - ) - issue_subscriber.delete() - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - - def subscribe(self, request, slug, project_id, issue_id): - if IssueSubscriber.objects.filter( - issue_id=issue_id, - subscriber=request.user, - workspace__slug=slug, - project=project_id, - ).exists(): - return Response( - {"message": "User already subscribed to the issue."}, - status=status.HTTP_400_BAD_REQUEST, - ) - - subscriber = IssueSubscriber.objects.create( - issue_id=issue_id, - subscriber_id=request.user.id, - project_id=project_id, - ) - serializer = 
IssueSubscriberSerializer(subscriber) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - def unsubscribe(self, request, slug, project_id, issue_id): - issue_subscriber = IssueSubscriber.objects.get( - project=project_id, - subscriber=request.user, - workspace__slug=slug, - issue=issue_id, - ) - issue_subscriber.delete() - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - - def subscription_status(self, request, slug, project_id, issue_id): - issue_subscriber = IssueSubscriber.objects.filter( - issue=issue_id, - subscriber=request.user, - workspace__slug=slug, - project=project_id, - ).exists() - return Response( - {"subscribed": issue_subscriber}, status=status.HTTP_200_OK - ) - - -class IssueReactionViewSet(BaseViewSet): - serializer_class = IssueReactionSerializer - model = IssueReaction - permission_classes = [ - ProjectLitePermission, - ] - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .order_by("-created_at") - .distinct() - ) - - def create(self, request, slug, project_id, issue_id): - serializer = IssueReactionSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - issue_id=issue_id, - project_id=project_id, - actor=request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, project_id, issue_id, reaction_code): - issue_reaction = IssueReaction.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - reaction=reaction_code, - actor=request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(issue_reaction.id), - } - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - issue_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class CommentReactionViewSet(BaseViewSet): - serializer_class = CommentReactionSerializer - model = CommentReaction - permission_classes = [ - ProjectLitePermission, - ] - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(comment_id=self.kwargs.get("comment_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .order_by("-created_at") - .distinct() - ) - - def create(self, request, slug, project_id, comment_id): - serializer = CommentReactionSerializer(data=request.data) - if serializer.is_valid(): - serializer.save( - project_id=project_id, - actor_id=request.user.id, - comment_id=comment_id, - ) - issue_activity.delay( - type="comment_reaction.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=None, - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return 
Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, project_id, comment_id, reaction_code): - comment_reaction = CommentReaction.objects.get( - workspace__slug=slug, - project_id=project_id, - comment_id=comment_id, - reaction=reaction_code, - actor=request.user, - ) - issue_activity.delay( - type="comment_reaction.activity.deleted", - requested_data=None, - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - { - "reaction": str(reaction_code), - "identifier": str(comment_reaction.id), - "comment_id": str(comment_id), - } - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - comment_reaction.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class IssueRelationViewSet(BaseViewSet): - serializer_class = IssueRelationSerializer - model = IssueRelation - permission_classes = [ - ProjectEntityPermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .select_related("project") - .select_related("workspace") - .select_related("issue") - .distinct() - ) - - def list(self, request, slug, project_id, issue_id): - issue_relations = ( - IssueRelation.objects.filter( - Q(issue_id=issue_id) | Q(related_issue=issue_id) - ) - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("project") - .select_related("workspace") - .select_related("issue") - .order_by("-created_at") - .distinct() - ) - - blocking_issues = issue_relations.filter( - relation_type="blocked_by", 
related_issue_id=issue_id - ) - blocked_by_issues = issue_relations.filter( - relation_type="blocked_by", issue_id=issue_id - ) - duplicate_issues = issue_relations.filter( - issue_id=issue_id, relation_type="duplicate" - ) - duplicate_issues_related = issue_relations.filter( - related_issue_id=issue_id, relation_type="duplicate" - ) - relates_to_issues = issue_relations.filter( - issue_id=issue_id, relation_type="relates_to" - ) - relates_to_issues_related = issue_relations.filter( - related_issue_id=issue_id, relation_type="relates_to" - ) - - blocked_by_issues_serialized = IssueRelationSerializer( - blocked_by_issues, many=True - ).data - duplicate_issues_serialized = IssueRelationSerializer( - duplicate_issues, many=True - ).data - relates_to_issues_serialized = IssueRelationSerializer( - relates_to_issues, many=True - ).data - - # revere relation for blocked by issues - blocking_issues_serialized = RelatedIssueSerializer( - blocking_issues, many=True - ).data - # reverse relation for duplicate issues - duplicate_issues_related_serialized = RelatedIssueSerializer( - duplicate_issues_related, many=True - ).data - # reverse relation for related issues - relates_to_issues_related_serialized = RelatedIssueSerializer( - relates_to_issues_related, many=True - ).data - - response_data = { - "blocking": blocking_issues_serialized, - "blocked_by": blocked_by_issues_serialized, - "duplicate": duplicate_issues_serialized - + duplicate_issues_related_serialized, - "relates_to": relates_to_issues_serialized - + relates_to_issues_related_serialized, - } - - return Response(response_data, status=status.HTTP_200_OK) - - def create(self, request, slug, project_id, issue_id): - relation_type = request.data.get("relation_type", None) - issues = request.data.get("issues", []) - project = Project.objects.get(pk=project_id) - - issue_relation = IssueRelation.objects.bulk_create( - [ - IssueRelation( - issue_id=( - issue if relation_type == "blocking" else issue_id - ), - 
related_issue_id=( - issue_id if relation_type == "blocking" else issue - ), - relation_type=( - "blocked_by" - if relation_type == "blocking" - else relation_type - ), - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for issue in issues - ], - batch_size=10, - ignore_conflicts=True, - ) - - issue_activity.delay( - type="issue_relation.activity.created", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - - if relation_type == "blocking": - return Response( - RelatedIssueSerializer(issue_relation, many=True).data, - status=status.HTTP_201_CREATED, - ) - else: - return Response( - IssueRelationSerializer(issue_relation, many=True).data, - status=status.HTTP_201_CREATED, - ) - - def remove_relation(self, request, slug, project_id, issue_id): - relation_type = request.data.get("relation_type", None) - related_issue = request.data.get("related_issue", None) - - if relation_type == "blocking": - issue_relation = IssueRelation.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=related_issue, - related_issue_id=issue_id, - ) - else: - issue_relation = IssueRelation.objects.get( - workspace__slug=slug, - project_id=project_id, - issue_id=issue_id, - related_issue_id=related_issue, - ) - current_instance = json.dumps( - IssueRelationSerializer(issue_relation).data, - cls=DjangoJSONEncoder, - ) - issue_relation.delete() - issue_activity.delay( - type="issue_relation.activity.deleted", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=current_instance, - epoch=int(timezone.now().timestamp()), - notification=True, - 
origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(status=status.HTTP_204_NO_CONTENT) - - -class IssueDraftViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - serializer_class = IssueFlatSerializer - model = Issue - - def get_queryset(self): - return ( - Issue.objects.filter(project_id=self.kwargs.get("project_id")) - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(is_draft=True) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=~Q(issue_module__module_id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - ) - ).distinct() - - @method_decorator(gzip_page) - def list(self, request, slug, project_id): - filters = issue_filters(request.query_params, "GET") - fields = [ - field - for field in request.GET.get("fields", "").split(",") - if field - ] - - # Custom ordering for priority and state - priority_order = ["urgent", "high", 
"medium", "low", "none"] - state_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", - ] - - order_by_param = request.GET.get("order_by", "-created_at") - - issue_queryset = self.get_queryset().filter(**filters) - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" - if order_by_param.startswith("-") - else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - # Only use serializer when expand else return by values - if self.expand or self.fields: - issues = IssueSerializer( - issue_queryset, - many=True, - fields=self.fields, - expand=self.expand, - ).data - else: - issues = issue_queryset.values( - "id", - "name", - "state_id", - "sort_order", - "completed_at", - "estimate_point", - "priority", - "start_date", - "target_date", - 
"sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", - "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", - "archived_at", - ) - return Response(issues, status=status.HTTP_200_OK) - - def create(self, request, slug, project_id): - project = Project.objects.get(pk=project_id) - - serializer = IssueCreateSerializer( - data=request.data, - context={ - "project_id": project_id, - "workspace_id": project.workspace_id, - "default_assignee_id": project.default_assignee_id, - }, - ) - - if serializer.is_valid(): - serializer.save(is_draft=True) - - # Track the issue - issue_activity.delay( - type="issue_draft.activity.created", - requested_data=json.dumps( - self.request.data, cls=DjangoJSONEncoder - ), - actor_id=str(request.user.id), - issue_id=str(serializer.data.get("id", None)), - project_id=str(project_id), - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - issue = ( - self.get_queryset().filter(pk=serializer.data["id"]).first() - ) - return Response( - IssueSerializer(issue).data, status=status.HTTP_201_CREATED - ) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def partial_update(self, request, slug, project_id, pk): - issue = self.get_queryset().filter(pk=pk).first() - - if not issue: - return Response( - {"error": "Issue does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - - serializer = IssueCreateSerializer(issue, data=request.data, partial=True) - - if serializer.is_valid(): - serializer.save() - issue_activity.delay( - type="issue_draft.activity.updated", - requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - 
IssueSerializer(issue).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(status=status.HTTP_204_NO_CONTENT) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def retrieve(self, request, slug, project_id, pk=None): - issue = ( - self.get_queryset() - .filter(pk=pk) - .prefetch_related( - Prefetch( - "issue_reactions", - queryset=IssueReaction.objects.select_related( - "issue", "actor" - ), - ) - ) - .prefetch_related( - Prefetch( - "issue_attachment", - queryset=IssueAttachment.objects.select_related("issue"), - ) - ) - .prefetch_related( - Prefetch( - "issue_link", - queryset=IssueLink.objects.select_related("created_by"), - ) - ) - .annotate( - is_subscribed=Exists( - IssueSubscriber.objects.filter( - workspace__slug=slug, - project_id=project_id, - issue_id=OuterRef("pk"), - subscriber=request.user, - ) - ) - ) - ).first() - - if not issue: - return Response( - {"error": "The required object does not exist."}, - status=status.HTTP_404_NOT_FOUND, - ) - serializer = IssueDetailSerializer(issue, expand=self.expand) - return Response(serializer.data, status=status.HTTP_200_OK) - - def destroy(self, request, slug, project_id, pk=None): - issue = Issue.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - issue.delete() - issue_activity.delay( - type="issue_draft.activity.deleted", - requested_data=json.dumps({"issue_id": str(pk)}), - actor_id=str(request.user.id), - issue_id=str(pk), - project_id=str(project_id), - current_instance={}, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/issue/activity.py b/apiserver/plane/app/views/issue/activity.py new file mode 100644 index 00000000000..6815b254ed7 --- /dev/null +++ b/apiserver/plane/app/views/issue/activity.py @@ 
-0,0 +1,87 @@ +# Python imports +from itertools import chain + +# Django imports +from django.db.models import ( + Prefetch, + Q, +) +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. import BaseAPIView +from plane.app.serializers import ( + IssueActivitySerializer, + IssueCommentSerializer, +) +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + IssueActivity, + IssueComment, + CommentReaction, +) + + +class IssueActivityEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + @method_decorator(gzip_page) + def get(self, request, slug, project_id, issue_id): + filters = {} + if request.GET.get("created_at__gt", None) is not None: + filters = {"created_at__gt": request.GET.get("created_at__gt")} + + issue_activities = ( + IssueActivity.objects.filter(issue_id=issue_id) + .filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + workspace__slug=slug, + ) + .filter(**filters) + .select_related("actor", "workspace", "issue", "project") + ).order_by("created_at") + issue_comments = ( + IssueComment.objects.filter(issue_id=issue_id) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + workspace__slug=slug, + ) + .filter(**filters) + .order_by("created_at") + .select_related("actor", "issue", "project", "workspace") + .prefetch_related( + Prefetch( + "comment_reactions", + queryset=CommentReaction.objects.select_related("actor"), + ) + ) + ) + issue_activities = IssueActivitySerializer( + issue_activities, many=True + ).data + issue_comments = 
IssueCommentSerializer(issue_comments, many=True).data + + if request.GET.get("activity_type", None) == "issue-property": + return Response(issue_activities, status=status.HTTP_200_OK) + + if request.GET.get("activity_type", None) == "issue-comment": + return Response(issue_comments, status=status.HTTP_200_OK) + + result_list = sorted( + chain(issue_activities, issue_comments), + key=lambda instance: instance["created_at"], + ) + + return Response(result_list, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/issue/archive.py b/apiserver/plane/app/views/issue/archive.py new file mode 100644 index 00000000000..d9274ae4fa3 --- /dev/null +++ b/apiserver/plane/app/views/issue/archive.py @@ -0,0 +1,348 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Q, + Case, + Value, + CharField, + When, + Exists, + Max, + UUIDField, +) +from django.core.serializers.json import DjangoJSONEncoder +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db.models.functions import Coalesce + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseViewSet +from plane.app.serializers import ( + IssueSerializer, + IssueFlatSerializer, + IssueDetailSerializer, +) +from plane.app.permissions import ( + ProjectEntityPermission, +) +from plane.db.models import ( + Issue, + IssueLink, + IssueAttachment, + IssueSubscriber, + IssueReaction, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.issue_filters import issue_filters + + +class IssueArchiveViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + serializer_class = IssueFlatSerializer + model = Issue + + def get_queryset(self): + return ( + Issue.objects.annotate( + sub_issues_count=Issue.objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(archived_at__isnull=False) + .filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels", "issue_module__module") + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], 
output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + ) + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + filters = issue_filters(request.query_params, "GET") + show_sub_issues = request.GET.get("show_sub_issues", "true") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = [ + "backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = self.get_queryset().filter(**filters) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + 
"-max_values" + if order_by_param.startswith("-") + else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issue_queryset = ( + issue_queryset + if show_sub_issues == "true" + else issue_queryset.filter(parent__isnull=True) + ) + if self.expand or self.fields: + issues = IssueSerializer( + issue_queryset, + many=True, + fields=self.fields, + ).data + else: + issues = issue_queryset.values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + "sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + return Response(issues, status=status.HTTP_200_OK) + + def retrieve(self, request, slug, project_id, pk=None): + issue = ( + self.get_queryset() + .filter(pk=pk) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related( + "issue", "actor" + ), + ) + ) + .prefetch_related( + Prefetch( + "issue_attachment", + queryset=IssueAttachment.objects.select_related("issue"), + ) + ) + .prefetch_related( + Prefetch( + "issue_link", + queryset=IssueLink.objects.select_related("created_by"), + ) + ) + .annotate( + is_subscribed=Exists( + IssueSubscriber.objects.filter( + workspace__slug=slug, + project_id=project_id, + issue_id=OuterRef("pk"), + subscriber=request.user, + ) + ) + ) + ).first() + if not issue: + return Response( + {"error": "The required object does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) + serializer = IssueDetailSerializer(issue, expand=self.expand) + return Response(serializer.data, status=status.HTTP_200_OK) + + def archive(self, request, slug, project_id, pk=None): + issue = Issue.issue_objects.get( + workspace__slug=slug, + project_id=project_id, + pk=pk, + ) + if 
issue.state.group not in ["completed", "cancelled"]: + return Response( + { + "error": "Can only archive completed or cancelled state group issue" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps( + { + "archived_at": str(timezone.now().date()), + "automation": False, + } + ), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + issue.archived_at = timezone.now().date() + issue.save() + + return Response( + {"archived_at": str(issue.archived_at)}, status=status.HTTP_200_OK + ) + + def unarchive(self, request, slug, project_id, pk=None): + issue = Issue.objects.get( + workspace__slug=slug, + project_id=project_id, + archived_at__isnull=False, + pk=pk, + ) + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"archived_at": None}), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + issue.archived_at = None + issue.save() + + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/issue/attachment.py b/apiserver/plane/app/views/issue/attachment.py new file mode 100644 index 00000000000..c2b8ad6ff7b --- /dev/null +++ b/apiserver/plane/app/views/issue/attachment.py @@ -0,0 +1,73 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.core.serializers.json import DjangoJSONEncoder + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from rest_framework.parsers 
import MultiPartParser, FormParser + +# Module imports +from .. import BaseAPIView +from plane.app.serializers import IssueAttachmentSerializer +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import IssueAttachment +from plane.bgtasks.issue_activites_task import issue_activity + + +class IssueAttachmentEndpoint(BaseAPIView): + serializer_class = IssueAttachmentSerializer + permission_classes = [ + ProjectEntityPermission, + ] + model = IssueAttachment + parser_classes = (MultiPartParser, FormParser) + + def post(self, request, slug, project_id, issue_id): + serializer = IssueAttachmentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id, issue_id=issue_id) + issue_activity.delay( + type="attachment.activity.created", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + serializer.data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def delete(self, request, slug, project_id, issue_id, pk): + issue_attachment = IssueAttachment.objects.get(pk=pk) + issue_attachment.asset.delete(save=False) + issue_attachment.delete() + issue_activity.delay( + type="attachment.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + + return Response(status=status.HTTP_204_NO_CONTENT) + + def get(self, request, slug, project_id, issue_id): + issue_attachments = 
IssueAttachment.objects.filter( + issue_id=issue_id, workspace__slug=slug, project_id=project_id + ) + serializer = IssueAttachmentSerializer(issue_attachments, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/issue/base.py b/apiserver/plane/app/views/issue/base.py new file mode 100644 index 00000000000..a27f52c748a --- /dev/null +++ b/apiserver/plane/app/views/issue/base.py @@ -0,0 +1,661 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.db.models import ( + Prefetch, + OuterRef, + Func, + F, + Q, + Case, + Value, + CharField, + When, + Exists, + Max, +) +from django.core.serializers.json import DjangoJSONEncoder +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db.models import UUIDField +from django.db.models.functions import Coalesce + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseViewSet, BaseAPIView, WebhookMixin +from plane.app.serializers import ( + IssuePropertySerializer, + IssueSerializer, + IssueCreateSerializer, + IssueDetailSerializer, +) +from plane.app.permissions import ( + ProjectEntityPermission, + ProjectLitePermission, +) +from plane.db.models import ( + Project, + Issue, + IssueProperty, + IssueLink, + IssueAttachment, + IssueSubscriber, + IssueReaction, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.issue_filters import issue_filters + + +class IssueListEndpoint(BaseAPIView): + + permission_classes = [ + ProjectEntityPermission, + ] + + def get(self, request, slug, project_id): + issue_ids = request.GET.get("issues", False) + + if not issue_ids: + return Response( + {"error": "Issues are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + issue_ids = [ + issue_id for issue_id in issue_ids.split(",") if issue_id != "" + ] + + queryset = ( + Issue.issue_objects.filter( + workspace__slug=slug, project_id=project_id, pk__in=issue_ids + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels", "issue_module__module") + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + 
ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + ).distinct() + + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = [ + "backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = queryset.filter(**filters) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + 
).order_by( + "-max_values" + if order_by_param.startswith("-") + else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + if self.fields or self.expand: + issues = IssueSerializer( + queryset, many=True, fields=self.fields, expand=self.expand + ).data + else: + issues = issue_queryset.values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + "sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + return Response(issues, status=status.HTTP_200_OK) + + +class IssueViewSet(WebhookMixin, BaseViewSet): + def get_serializer_class(self): + return ( + IssueCreateSerializer + if self.action in ["create", "update", "partial_update"] + else IssueSerializer + ) + + model = Issue + webhook_event = "issue" + permission_classes = [ + ProjectEntityPermission, + ] + + search_fields = [ + "name", + ] + + filterset_fields = [ + "state__name", + "assignees__id", + "workspace__id", + ] + + def get_queryset(self): + return ( + Issue.issue_objects.filter( + project_id=self.kwargs.get("project_id") + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels", "issue_module__module") + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() 
+ .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + ).distinct() + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + filters = issue_filters(request.query_params, "GET") + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = self.get_queryset().filter(**filters) + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = [ + "backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + 
output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" + if order_by_param.startswith("-") + else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + # Only use serializer when expand or fields else return by values + if self.expand or self.fields: + issues = IssueSerializer( + issue_queryset, + many=True, + fields=self.fields, + expand=self.expand, + ).data + else: + issues = issue_queryset.values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + "sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + return Response(issues, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id): + project = Project.objects.get(pk=project_id) + + serializer = IssueCreateSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save() + + # Track the issue + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps( + self.request.data, cls=DjangoJSONEncoder + ), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + issue = ( + 
self.get_queryset() + .filter(pk=serializer.data["id"]) + .values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + "sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + .first() + ) + return Response(issue, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk=None): + issue = ( + self.get_queryset() + .filter(pk=pk) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related( + "issue", "actor" + ), + ) + ) + .prefetch_related( + Prefetch( + "issue_attachment", + queryset=IssueAttachment.objects.select_related("issue"), + ) + ) + .prefetch_related( + Prefetch( + "issue_link", + queryset=IssueLink.objects.select_related("created_by"), + ) + ) + .annotate( + is_subscribed=Exists( + IssueSubscriber.objects.filter( + workspace__slug=slug, + project_id=project_id, + issue_id=OuterRef("pk"), + subscriber=request.user, + ) + ) + ) + ).first() + if not issue: + return Response( + {"error": "The required object does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) + + serializer = IssueDetailSerializer(issue, expand=self.expand) + return Response(serializer.data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, project_id, pk=None): + issue = self.get_queryset().filter(pk=pk).first() + + if not issue: + return Response( + {"error": "Issue not found"}, + status=status.HTTP_404_NOT_FOUND, + ) + + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + serializer = IssueCreateSerializer( + issue, 
data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + issue = self.get_queryset().filter(pk=pk).first() + return Response(status=status.HTTP_204_NO_CONTENT) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, pk=None): + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + issue.delete() + issue_activity.delay( + type="issue.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance={}, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class IssueUserDisplayPropertyEndpoint(BaseAPIView): + permission_classes = [ + ProjectLitePermission, + ] + + def patch(self, request, slug, project_id): + issue_property = IssueProperty.objects.get( + user=request.user, + project_id=project_id, + ) + + issue_property.filters = request.data.get( + "filters", issue_property.filters + ) + issue_property.display_filters = request.data.get( + "display_filters", issue_property.display_filters + ) + issue_property.display_properties = request.data.get( + "display_properties", issue_property.display_properties + ) + issue_property.save() + serializer = IssuePropertySerializer(issue_property) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def get(self, request, slug, project_id): + issue_property, _ = IssueProperty.objects.get_or_create( + user=request.user, project_id=project_id + ) + serializer 
= IssuePropertySerializer(issue_property) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class BulkDeleteIssuesEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def delete(self, request, slug, project_id): + issue_ids = request.data.get("issue_ids", []) + + if not len(issue_ids): + return Response( + {"error": "Issue IDs are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + issues = Issue.issue_objects.filter( + workspace__slug=slug, project_id=project_id, pk__in=issue_ids + ) + + total_issues = len(issues) + + issues.delete() + + return Response( + {"message": f"{total_issues} issues were deleted"}, + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/app/views/issue/comment.py b/apiserver/plane/app/views/issue/comment.py new file mode 100644 index 00000000000..0d61f132576 --- /dev/null +++ b/apiserver/plane/app/views/issue/comment.py @@ -0,0 +1,221 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.db.models import Exists +from django.core.serializers.json import DjangoJSONEncoder + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseViewSet, WebhookMixin +from plane.app.serializers import ( + IssueCommentSerializer, + CommentReactionSerializer, +) +from plane.app.permissions import ProjectLitePermission +from plane.db.models import ( + IssueComment, + ProjectMember, + CommentReaction, +) +from plane.bgtasks.issue_activites_task import issue_activity + + +class IssueCommentViewSet(WebhookMixin, BaseViewSet): + serializer_class = IssueCommentSerializer + model = IssueComment + webhook_event = "issue_comment" + permission_classes = [ + ProjectLitePermission, + ] + + filterset_fields = [ + "issue__id", + "workspace__id", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + member_id=self.request.user.id, + is_active=True, + ) + ) + ) + .distinct() + ) + + def create(self, request, slug, project_id, issue_id): + serializer = IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + ) + issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps( + serializer.data, cls=DjangoJSONEncoder + ), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return 
Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + pk=pk, + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueCommentSerializer( + issue_comment, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, pk): + issue_comment = IssueComment.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + pk=pk, + ) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + issue_comment.delete() + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + + +class CommentReactionViewSet(BaseViewSet): + serializer_class = CommentReactionSerializer + model = CommentReaction + permission_classes = [ + 
ProjectLitePermission, + ] + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(comment_id=self.kwargs.get("comment_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .order_by("-created_at") + .distinct() + ) + + def create(self, request, slug, project_id, comment_id): + serializer = CommentReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + actor_id=request.user.id, + comment_id=comment_id, + ) + issue_activity.delay( + type="comment_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=None, + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, comment_id, reaction_code): + comment_reaction = CommentReaction.objects.get( + workspace__slug=slug, + project_id=project_id, + comment_id=comment_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="comment_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=None, + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(comment_reaction.id), + "comment_id": str(comment_id), + } + ), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + comment_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git 
a/apiserver/plane/app/views/issue/draft.py b/apiserver/plane/app/views/issue/draft.py new file mode 100644 index 00000000000..e1c6962d896 --- /dev/null +++ b/apiserver/plane/app/views/issue/draft.py @@ -0,0 +1,365 @@ +# Python imports +import json + +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.core.serializers.json import DjangoJSONEncoder +from django.db.models import ( + Case, + CharField, + Exists, + F, + Func, + Max, + OuterRef, + Prefetch, + Q, + UUIDField, + Value, + When, +) +from django.db.models.functions import Coalesce + +# Django imports +from django.utils import timezone +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from rest_framework import status + +# Third Party imports +from rest_framework.response import Response + +from plane.app.permissions import ProjectEntityPermission +from plane.app.serializers import ( + IssueCreateSerializer, + IssueDetailSerializer, + IssueFlatSerializer, + IssueSerializer, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.db.models import ( + Issue, + IssueAttachment, + IssueLink, + IssueReaction, + IssueSubscriber, + Project, +) +from plane.utils.issue_filters import issue_filters + +# Module imports +from .. 
import BaseViewSet + + +class IssueDraftViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + serializer_class = IssueFlatSerializer + model = Issue + + def get_queryset(self): + return ( + Issue.objects.filter(project_id=self.kwargs.get("project_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(is_draft=True) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels", "issue_module__module") + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + ).distinct() + + @method_decorator(gzip_page) + def list(self, request, slug, project_id): + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = [ + "backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + 
+ order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = self.get_queryset().filter(**filters) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" + if order_by_param.startswith("-") + else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + # Only use serializer when expand else return by values + if self.expand or self.fields: + issues = IssueSerializer( + issue_queryset, + many=True, + fields=self.fields, + expand=self.expand, + ).data + else: + issues = issue_queryset.values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + 
"sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + return Response(issues, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id): + project = Project.objects.get(pk=project_id) + + serializer = IssueCreateSerializer( + data=request.data, + context={ + "project_id": project_id, + "workspace_id": project.workspace_id, + "default_assignee_id": project.default_assignee_id, + }, + ) + + if serializer.is_valid(): + serializer.save(is_draft=True) + + # Track the issue + issue_activity.delay( + type="issue_draft.activity.created", + requested_data=json.dumps( + self.request.data, cls=DjangoJSONEncoder + ), + actor_id=str(request.user.id), + issue_id=str(serializer.data.get("id", None)), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + issue = ( + self.get_queryset().filter(pk=serializer.data["id"]).first() + ) + return Response( + IssueSerializer(issue).data, status=status.HTTP_201_CREATED + ) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, pk): + issue = self.get_queryset().filter(pk=pk).first() + + if not issue: + return Response( + {"error": "Issue does not exist"}, + status=status.HTTP_404_NOT_FOUND, + ) + + serializer = IssueCreateSerializer( + issue, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="issue_draft.activity.updated", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + IssueSerializer(issue).data, + cls=DjangoJSONEncoder, + ), + epoch=int(timezone.now().timestamp()), + notification=True, 
+ origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def retrieve(self, request, slug, project_id, pk=None): + issue = ( + self.get_queryset() + .filter(pk=pk) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related( + "issue", "actor" + ), + ) + ) + .prefetch_related( + Prefetch( + "issue_attachment", + queryset=IssueAttachment.objects.select_related("issue"), + ) + ) + .prefetch_related( + Prefetch( + "issue_link", + queryset=IssueLink.objects.select_related("created_by"), + ) + ) + .annotate( + is_subscribed=Exists( + IssueSubscriber.objects.filter( + workspace__slug=slug, + project_id=project_id, + issue_id=OuterRef("pk"), + subscriber=request.user, + ) + ) + ) + ).first() + + if not issue: + return Response( + {"error": "The required object does not exist."}, + status=status.HTTP_404_NOT_FOUND, + ) + serializer = IssueDetailSerializer(issue, expand=self.expand) + return Response(serializer.data, status=status.HTTP_200_OK) + + def destroy(self, request, slug, project_id, pk=None): + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + issue.delete() + issue_activity.delay( + type="issue_draft.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance={}, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/issue/label.py b/apiserver/plane/app/views/issue/label.py new file mode 100644 index 00000000000..c5dc35809e9 --- /dev/null +++ b/apiserver/plane/app/views/issue/label.py @@ -0,0 +1,105 @@ +# Python imports +import random + +# Django imports +from django.db import IntegrityError + +# Third Party imports 
+from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. import BaseViewSet, BaseAPIView +from plane.app.serializers import LabelSerializer +from plane.app.permissions import ( + ProjectMemberPermission, +) +from plane.db.models import ( + Project, + Label, +) +from plane.utils.cache import invalidate_cache + + +class LabelViewSet(BaseViewSet): + serializer_class = LabelSerializer + model = Label + permission_classes = [ + ProjectMemberPermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("parent") + .distinct() + .order_by("sort_order") + ) + + @invalidate_cache( + path="/api/workspaces/:slug/labels/", url_params=True, user=False + ) + def create(self, request, slug, project_id): + try: + serializer = LabelSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(project_id=project_id) + return Response( + serializer.data, status=status.HTTP_201_CREATED + ) + return Response( + serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + except IntegrityError: + return Response( + { + "error": "Label with the same name already exists in the project" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + @invalidate_cache( + path="/api/workspaces/:slug/labels/", url_params=True, user=False + ) + def partial_update(self, request, *args, **kwargs): + return super().partial_update(request, *args, **kwargs) + + @invalidate_cache( + path="/api/workspaces/:slug/labels/", url_params=True, user=False + ) + def destroy(self, request, *args, **kwargs): + return super().destroy(request, *args, **kwargs) + + +class BulkCreateIssueLabelsEndpoint(BaseAPIView): + def post(self, request, slug, project_id): + label_data = 
request.data.get("label_data", []) + project = Project.objects.get(pk=project_id) + + labels = Label.objects.bulk_create( + [ + Label( + name=label.get("name", "Migrated"), + description=label.get("description", "Migrated Issue"), + color=f"#{random.randint(0, 0xFFFFFF+1):06X}", + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for label in label_data + ], + batch_size=50, + ignore_conflicts=True, + ) + + return Response( + {"labels": LabelSerializer(labels, many=True).data}, + status=status.HTTP_201_CREATED, + ) diff --git a/apiserver/plane/app/views/issue/link.py b/apiserver/plane/app/views/issue/link.py new file mode 100644 index 00000000000..c965a7d4d11 --- /dev/null +++ b/apiserver/plane/app/views/issue/link.py @@ -0,0 +1,121 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.core.serializers.json import DjangoJSONEncoder + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseViewSet +from plane.app.serializers import IssueLinkSerializer +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import IssueLink +from plane.bgtasks.issue_activites_task import issue_activity + + +class IssueLinkViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + + model = IssueLink + serializer_class = IssueLinkSerializer + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .order_by("-created_at") + .distinct() + ) + + def create(self, request, slug, project_id, issue_id): + serializer = IssueLinkSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + ) + issue_activity.delay( + type="link.activity.created", + requested_data=json.dumps( + serializer.data, cls=DjangoJSONEncoder + ), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, slug, project_id, issue_id, pk): + issue_link = IssueLink.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + pk=pk, + ) + requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueLinkSerializer( + issue_link, data=request.data, 
partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="link.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, pk): + issue_link = IssueLink.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + pk=pk, + ) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + issue_activity.delay( + type="link.activity.deleted", + requested_data=json.dumps({"link_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + issue_link.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/issue/reaction.py b/apiserver/plane/app/views/issue/reaction.py new file mode 100644 index 00000000000..da8f6ebb581 --- /dev/null +++ b/apiserver/plane/app/views/issue/reaction.py @@ -0,0 +1,90 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.core.serializers.json import DjangoJSONEncoder + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseViewSet +from plane.app.serializers import IssueReactionSerializer +from plane.app.permissions import ProjectLitePermission +from plane.db.models import IssueReaction +from plane.bgtasks.issue_activites_task import issue_activity + + +class IssueReactionViewSet(BaseViewSet): + serializer_class = IssueReactionSerializer + model = IssueReaction + permission_classes = [ + ProjectLitePermission, + ] + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .order_by("-created_at") + .distinct() + ) + + def create(self, request, slug, project_id, issue_id): + serializer = IssueReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + issue_id=issue_id, + project_id=project_id, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, issue_id, reaction_code): + issue_reaction = IssueReaction.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + reaction=reaction_code, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.deleted", + requested_data=None, + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id", None)), + 
project_id=str(self.kwargs.get("project_id", None)), + current_instance=json.dumps( + { + "reaction": str(reaction_code), + "identifier": str(issue_reaction.id), + } + ), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + issue_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/issue/relation.py b/apiserver/plane/app/views/issue/relation.py new file mode 100644 index 00000000000..eb5aff9af66 --- /dev/null +++ b/apiserver/plane/app/views/issue/relation.py @@ -0,0 +1,205 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.db.models import Q +from django.core.serializers.json import DjangoJSONEncoder + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. import BaseViewSet +from plane.app.serializers import ( + IssueRelationSerializer, + RelatedIssueSerializer, +) +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + Project, + IssueRelation, +) +from plane.bgtasks.issue_activites_task import issue_activity + + +class IssueRelationViewSet(BaseViewSet): + serializer_class = IssueRelationSerializer + model = IssueRelation + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .distinct() + ) + + def list(self, request, slug, project_id, issue_id): + issue_relations = ( + IssueRelation.objects.filter( + 
 Q(issue_id=issue_id) | Q(related_issue=issue_id) + ) + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .order_by("-created_at") + .distinct() + ) + + blocking_issues = issue_relations.filter( + relation_type="blocked_by", related_issue_id=issue_id + ) + blocked_by_issues = issue_relations.filter( + relation_type="blocked_by", issue_id=issue_id + ) + duplicate_issues = issue_relations.filter( + issue_id=issue_id, relation_type="duplicate" + ) + duplicate_issues_related = issue_relations.filter( + related_issue_id=issue_id, relation_type="duplicate" + ) + relates_to_issues = issue_relations.filter( + issue_id=issue_id, relation_type="relates_to" + ) + relates_to_issues_related = issue_relations.filter( + related_issue_id=issue_id, relation_type="relates_to" + ) + + blocked_by_issues_serialized = IssueRelationSerializer( + blocked_by_issues, many=True + ).data + duplicate_issues_serialized = IssueRelationSerializer( + duplicate_issues, many=True + ).data + relates_to_issues_serialized = IssueRelationSerializer( + relates_to_issues, many=True + ).data + + # reverse relation for blocked by issues + blocking_issues_serialized = RelatedIssueSerializer( + blocking_issues, many=True + ).data + # reverse relation for duplicate issues + duplicate_issues_related_serialized = RelatedIssueSerializer( + duplicate_issues_related, many=True + ).data + # reverse relation for related issues + relates_to_issues_related_serialized = RelatedIssueSerializer( + relates_to_issues_related, many=True + ).data + + response_data = { + "blocking": blocking_issues_serialized, + "blocked_by": blocked_by_issues_serialized, + "duplicate": duplicate_issues_serialized + + duplicate_issues_related_serialized, + "relates_to": relates_to_issues_serialized + + relates_to_issues_related_serialized, + } + + return Response(response_data, status=status.HTTP_200_OK) + + def create(self, request, slug, project_id, 
issue_id): + relation_type = request.data.get("relation_type", None) + issues = request.data.get("issues", []) + project = Project.objects.get(pk=project_id) + + issue_relation = IssueRelation.objects.bulk_create( + [ + IssueRelation( + issue_id=( + issue if relation_type == "blocking" else issue_id + ), + related_issue_id=( + issue_id if relation_type == "blocking" else issue + ), + relation_type=( + "blocked_by" + if relation_type == "blocking" + else relation_type + ), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for issue in issues + ], + batch_size=10, + ignore_conflicts=True, + ) + + issue_activity.delay( + type="issue_relation.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + + if relation_type == "blocking": + return Response( + RelatedIssueSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + else: + return Response( + IssueRelationSerializer(issue_relation, many=True).data, + status=status.HTTP_201_CREATED, + ) + + def remove_relation(self, request, slug, project_id, issue_id): + relation_type = request.data.get("relation_type", None) + related_issue = request.data.get("related_issue", None) + + if relation_type == "blocking": + issue_relation = IssueRelation.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=related_issue, + related_issue_id=issue_id, + ) + else: + issue_relation = IssueRelation.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + related_issue_id=related_issue, + ) + current_instance = json.dumps( + IssueRelationSerializer(issue_relation).data, + cls=DjangoJSONEncoder, + ) + issue_relation.delete() + issue_activity.delay( + 
type="issue_relation.activity.deleted", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/issue/sub_issue.py b/apiserver/plane/app/views/issue/sub_issue.py new file mode 100644 index 00000000000..da479e0e99e --- /dev/null +++ b/apiserver/plane/app/views/issue/sub_issue.py @@ -0,0 +1,196 @@ +# Python imports +import json + +# Django imports +from django.utils import timezone +from django.db.models import ( + OuterRef, + Func, + F, + Q, + Value, + UUIDField, +) +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db.models.functions import Coalesce + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseAPIView +from plane.app.serializers import IssueSerializer +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + Issue, + IssueLink, + IssueAttachment, +) +from plane.bgtasks.issue_activites_task import issue_activity +from collections import defaultdict + + +class SubIssuesEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + @method_decorator(gzip_page) + def get(self, request, slug, project_id, issue_id): + sub_issues = ( + Issue.issue_objects.filter( + parent_id=issue_id, workspace__slug=slug + ) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels", "issue_module__module") + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + .annotate(state_group=F("state__group")) + ) + + # create's a dict with state group name with their respective issue 
id's + result = defaultdict(list) + for sub_issue in sub_issues: + result[sub_issue.state_group].append(str(sub_issue.id)) + + sub_issues = sub_issues.values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + "sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + return Response( + { + "sub_issues": sub_issues, + "state_distribution": result, + }, + status=status.HTTP_200_OK, + ) + + # Assign multiple sub issues + def post(self, request, slug, project_id, issue_id): + parent_issue = Issue.issue_objects.get(pk=issue_id) + sub_issue_ids = request.data.get("sub_issue_ids", []) + + if not len(sub_issue_ids): + return Response( + {"error": "Sub Issue IDs are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids) + + for sub_issue in sub_issues: + sub_issue.parent = parent_issue + + _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10) + + updated_sub_issues = Issue.issue_objects.filter( + id__in=sub_issue_ids + ).annotate(state_group=F("state__group")) + + # Track the issue + _ = [ + issue_activity.delay( + type="issue.activity.updated", + requested_data=json.dumps({"parent": str(issue_id)}), + actor_id=str(request.user.id), + issue_id=str(sub_issue_id), + project_id=str(project_id), + current_instance=json.dumps({"parent": str(sub_issue_id)}), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + for sub_issue_id in sub_issue_ids + ] + + # create's a dict with state group name with their respective issue id's + result = defaultdict(list) + for sub_issue in updated_sub_issues: + result[sub_issue.state_group].append(str(sub_issue.id)) + + 
serializer = IssueSerializer( + updated_sub_issues, + many=True, + ) + return Response( + { + "sub_issues": serializer.data, + "state_distribution": result, + }, + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/app/views/issue/subscriber.py b/apiserver/plane/app/views/issue/subscriber.py new file mode 100644 index 00000000000..dc727de285e --- /dev/null +++ b/apiserver/plane/app/views/issue/subscriber.py @@ -0,0 +1,125 @@ +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. import BaseViewSet +from plane.app.serializers import ( + IssueSubscriberSerializer, + ProjectMemberLiteSerializer, +) +from plane.app.permissions import ( + ProjectEntityPermission, + ProjectLitePermission, +) +from plane.db.models import ( + IssueSubscriber, + ProjectMember, +) + + +class IssueSubscriberViewSet(BaseViewSet): + serializer_class = IssueSubscriberSerializer + model = IssueSubscriber + + permission_classes = [ + ProjectEntityPermission, + ] + + def get_permissions(self): + if self.action in ["subscribe", "unsubscribe", "subscription_status"]: + self.permission_classes = [ + ProjectLitePermission, + ] + else: + self.permission_classes = [ + ProjectEntityPermission, + ] + + return super(IssueSubscriberViewSet, self).get_permissions() + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), + issue_id=self.kwargs.get("issue_id"), + ) + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .order_by("-created_at") + .distinct() + ) + + def list(self, request, slug, project_id, issue_id): + members = 
ProjectMember.objects.filter( + workspace__slug=slug, + project_id=project_id, + is_active=True, + ).select_related("member") + serializer = ProjectMemberLiteSerializer(members, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + + def destroy(self, request, slug, project_id, issue_id, subscriber_id): + issue_subscriber = IssueSubscriber.objects.get( + project=project_id, + subscriber=subscriber_id, + workspace__slug=slug, + issue=issue_id, + ) + issue_subscriber.delete() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + def subscribe(self, request, slug, project_id, issue_id): + if IssueSubscriber.objects.filter( + issue_id=issue_id, + subscriber=request.user, + workspace__slug=slug, + project=project_id, + ).exists(): + return Response( + {"message": "User already subscribed to the issue."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + subscriber = IssueSubscriber.objects.create( + issue_id=issue_id, + subscriber_id=request.user.id, + project_id=project_id, + ) + serializer = IssueSubscriberSerializer(subscriber) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def unsubscribe(self, request, slug, project_id, issue_id): + issue_subscriber = IssueSubscriber.objects.get( + project=project_id, + subscriber=request.user, + workspace__slug=slug, + issue=issue_id, + ) + issue_subscriber.delete() + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + def subscription_status(self, request, slug, project_id, issue_id): + issue_subscriber = IssueSubscriber.objects.filter( + issue=issue_id, + subscriber=request.user, + workspace__slug=slug, + project=project_id, + ).exists() + return Response( + {"subscribed": issue_subscriber}, status=status.HTTP_200_OK + ) diff --git a/apiserver/plane/app/views/module.py b/apiserver/plane/app/views/module/base.py similarity index 69% rename from apiserver/plane/app/views/module.py rename to apiserver/plane/app/views/module/base.py index 3b52db64f9e..3fe3a078acb 100644 
--- a/apiserver/plane/app/views/module.py +++ b/apiserver/plane/app/views/module/base.py @@ -1,51 +1,57 @@ # Python imports import json -# Django Imports -from django.utils import timezone -from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q -from django.utils.decorators import method_decorator -from django.views.decorators.gzip import gzip_page from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.fields import ArrayField -from django.db.models import Value, UUIDField +from django.db.models import ( + Count, + Exists, + F, + Func, + IntegerField, + OuterRef, + Prefetch, + Q, + Subquery, + UUIDField, + Value, +) from django.db.models.functions import Coalesce +# Django Imports +from django.utils import timezone +from rest_framework import status + # Third party imports from rest_framework.response import Response -from rest_framework import status -# Module imports -from . import BaseViewSet, BaseAPIView, WebhookMixin -from plane.app.serializers import ( - ModuleWriteSerializer, - ModuleSerializer, - ModuleIssueSerializer, - ModuleLinkSerializer, - ModuleFavoriteSerializer, - IssueSerializer, - ModuleUserPropertiesSerializer, - ModuleDetailSerializer, -) from plane.app.permissions import ( ProjectEntityPermission, ProjectLitePermission, ) +from plane.app.serializers import ( + ModuleDetailSerializer, + ModuleFavoriteSerializer, + ModuleLinkSerializer, + ModuleSerializer, + ModuleUserPropertiesSerializer, + ModuleWriteSerializer, +) +from plane.bgtasks.issue_activites_task import issue_activity from plane.db.models import ( + Issue, Module, + ModuleFavorite, ModuleIssue, - Project, - Issue, ModuleLink, - ModuleFavorite, - IssueLink, - IssueAttachment, ModuleUserProperties, + Project, ) -from plane.bgtasks.issue_activites_task import issue_activity -from plane.utils.issue_filters import issue_filters from plane.utils.analytics_plot import burndown_plot +# Module imports +from .. 
import BaseAPIView, BaseViewSet, WebhookMixin + class ModuleViewSet(WebhookMixin, BaseViewSet): model = Module @@ -68,6 +74,59 @@ def get_queryset(self): project_id=self.kwargs.get("project_id"), workspace__slug=self.kwargs.get("slug"), ) + cancelled_issues = ( + Issue.issue_objects.filter( + state__group="cancelled", + issue_module__module_id=OuterRef("pk"), + ) + .values("issue_module__module_id") + .annotate(cnt=Count("pk")) + .values("cnt") + ) + completed_issues = ( + Issue.issue_objects.filter( + state__group="completed", + issue_module__module_id=OuterRef("pk"), + ) + .values("issue_module__module_id") + .annotate(cnt=Count("pk")) + .values("cnt") + ) + started_issues = ( + Issue.issue_objects.filter( + state__group="started", + issue_module__module_id=OuterRef("pk"), + ) + .values("issue_module__module_id") + .annotate(cnt=Count("pk")) + .values("cnt") + ) + unstarted_issues = ( + Issue.issue_objects.filter( + state__group="unstarted", + issue_module__module_id=OuterRef("pk"), + ) + .values("issue_module__module_id") + .annotate(cnt=Count("pk")) + .values("cnt") + ) + backlog_issues = ( + Issue.issue_objects.filter( + state__group="backlog", + issue_module__module_id=OuterRef("pk"), + ) + .values("issue_module__module_id") + .annotate(cnt=Count("pk")) + .values("cnt") + ) + total_issues = ( + Issue.issue_objects.filter( + issue_module__module_id=OuterRef("pk"), + ) + .values("issue_module__module_id") + .annotate(cnt=Count("pk")) + .values("cnt") + ) return ( super() .get_queryset() @@ -87,62 +146,39 @@ def get_queryset(self): ) ) .annotate( - total_issues=Count( - "issue_module", - filter=Q( - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), - ), + completed_issues=Coalesce( + Subquery(completed_issues[:1]), + Value(0, output_field=IntegerField()), + ) ) .annotate( - completed_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="completed", - 
issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), + cancelled_issues=Coalesce( + Subquery(cancelled_issues[:1]), + Value(0, output_field=IntegerField()), ) ) .annotate( - cancelled_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="cancelled", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), + started_issues=Coalesce( + Subquery(started_issues[:1]), + Value(0, output_field=IntegerField()), ) ) .annotate( - started_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="started", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), + unstarted_issues=Coalesce( + Subquery(unstarted_issues[:1]), + Value(0, output_field=IntegerField()), ) ) .annotate( - unstarted_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="unstarted", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), + backlog_issues=Coalesce( + Subquery(backlog_issues[:1]), + Value(0, output_field=IntegerField()), ) ) .annotate( - backlog_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="backlog", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), + total_issues=Coalesce( + Subquery(total_issues[:1]), + Value(0, output_field=IntegerField()), ) ) .annotate( @@ -190,9 +226,9 @@ def create(self, request, slug, project_id): "external_id", # computed fields "is_favorite", - "total_issues", "cancelled_issues", "completed_issues", + "total_issues", "started_issues", "unstarted_issues", "backlog_issues", @@ -204,7 +240,7 @@ def create(self, request, slug, project_id): return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def list(self, request, slug, project_id): - queryset = self.get_queryset() + 
queryset = self.get_queryset().filter(archived_at__isnull=True) if self.fields: modules = ModuleSerializer( queryset, @@ -231,8 +267,8 @@ def list(self, request, slug, project_id): "external_source", "external_id", # computed fields - "is_favorite", "total_issues", + "is_favorite", "cancelled_issues", "completed_issues", "started_issues", @@ -244,7 +280,21 @@ def list(self, request, slug, project_id): return Response(modules, status=status.HTTP_200_OK) def retrieve(self, request, slug, project_id, pk): - queryset = self.get_queryset().filter(pk=pk) + queryset = ( + self.get_queryset() + .filter(archived_at__isnull=True) + .filter(pk=pk) + .annotate( + sub_issues=Issue.issue_objects.filter( + project_id=self.kwargs.get("project_id"), + parent__isnull=False, + issue_module__module_id=pk, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) assignee_distribution = ( Issue.objects.filter( @@ -345,9 +395,11 @@ def retrieve(self, request, slug, project_id, pk): "completion_chart": {}, } - if queryset.first().start_date and queryset.first().target_date: + # Fetch the modules + modules = queryset.first() + if modules and modules.start_date and modules.target_date: data["distribution"]["completion_chart"] = burndown_plot( - queryset=queryset.first(), + queryset=modules, slug=slug, project_id=project_id, module_id=pk, @@ -359,14 +411,20 @@ def retrieve(self, request, slug, project_id, pk): ) def partial_update(self, request, slug, project_id, pk): - queryset = self.get_queryset().filter(pk=pk) + module = self.get_queryset().filter(pk=pk) + + if module.first().archived_at: + return Response( + {"error": "Archived module cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) serializer = ModuleWriteSerializer( - queryset.first(), data=request.data, partial=True + module.first(), data=request.data, partial=True ) if serializer.is_valid(): serializer.save() - module = queryset.values( + module = module.values( # Required fields 
"id", "workspace_id", @@ -387,10 +445,10 @@ def partial_update(self, request, slug, project_id, pk): "external_id", # computed fields "is_favorite", - "total_issues", "cancelled_issues", "completed_issues", "started_issues", + "total_issues", "unstarted_issues", "backlog_issues", "created_at", @@ -426,260 +484,196 @@ def destroy(self, request, slug, project_id, pk): return Response(status=status.HTTP_204_NO_CONTENT) -class ModuleIssueViewSet(WebhookMixin, BaseViewSet): - serializer_class = ModuleIssueSerializer - model = ModuleIssue - webhook_event = "module_issue" - bulk = True - - filterset_fields = [ - "issue__labels__id", - "issue__assignees__id", +class ModuleLinkViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, ] + model = ModuleLink + serializer_class = ModuleLinkSerializer + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), + module_id=self.kwargs.get("module_id"), + ) + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(module_id=self.kwargs.get("module_id")) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .order_by("-created_at") + .distinct() + ) + + +class ModuleArchiveUnarchiveEndpoint(BaseAPIView): + permission_classes = [ ProjectEntityPermission, ] def get_queryset(self): + favorite_subquery = ModuleFavorite.objects.filter( + user=self.request.user, + module_id=OuterRef("pk"), + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + ) return ( - Issue.issue_objects.filter( - project_id=self.kwargs.get("project_id"), - workspace__slug=self.kwargs.get("slug"), - issue_module__module_id=self.kwargs.get("module_id"), + Module.objects.filter(workspace__slug=self.kwargs.get("slug")) + 
.filter(archived_at__isnull=False) + .annotate(is_favorite=Exists(favorite_subquery)) + .select_related("project") + .select_related("workspace") + .select_related("lead") + .prefetch_related("members") + .prefetch_related( + Prefetch( + "link_module", + queryset=ModuleLink.objects.select_related( + "module", "created_by" + ), + ) ) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") - .annotate(cycle_id=F("issue_cycle__cycle_id")) .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") + total_issues=Count( + "issue_module", + filter=Q( + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ), ) .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") + completed_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="completed", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") + cancelled_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="cancelled", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), + started_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="started", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, ), - Value([], 
output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), + distinct=True, + ) + ) + .annotate( + unstarted_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="unstarted", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( + distinct=True, + ) + ) + .annotate( + backlog_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="backlog", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + member_ids=Coalesce( ArrayAgg( - "issue_module__module_id", + "members__id", distinct=True, - filter=~Q(issue_module__module_id__isnull=True), + filter=~Q(members__id__isnull=True), ), Value([], output_field=ArrayField(UUIDField())), - ), - ) - ).distinct() - - @method_decorator(gzip_page) - def list(self, request, slug, project_id, module_id): - fields = [ - field - for field in request.GET.get("fields", "").split(",") - if field - ] - filters = issue_filters(request.query_params, "GET") - issue_queryset = self.get_queryset().filter(**filters) - if self.fields or self.expand: - issues = IssueSerializer( - issue_queryset, many=True, fields=fields if fields else None - ).data - else: - issues = issue_queryset.values( - "id", - "name", - "state_id", - "sort_order", - "completed_at", - "estimate_point", - "priority", - "start_date", - "target_date", - "sequence_id", - "project_id", - "parent_id", - "cycle_id", - "module_ids", - "label_ids", - "assignee_ids", - "sub_issues_count", - "created_at", - "updated_at", - "created_by", - "updated_by", - "attachment_count", - "link_count", - "is_draft", - "archived_at", - ) - return Response(issues, status=status.HTTP_200_OK) - - # create multiple issues 
inside a module - def create_module_issues(self, request, slug, project_id, module_id): - issues = request.data.get("issues", []) - if not issues: - return Response( - {"error": "Issues are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - project = Project.objects.get(pk=project_id) - _ = ModuleIssue.objects.bulk_create( - [ - ModuleIssue( - issue_id=str(issue), - module_id=module_id, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, ) - for issue in issues - ], - batch_size=10, - ignore_conflicts=True, - ) - # Bulk Update the activity - _ = [ - issue_activity.delay( - type="module.activity.created", - requested_data=json.dumps({"module_id": str(module_id)}), - actor_id=str(request.user.id), - issue_id=str(issue), - project_id=project_id, - current_instance=None, - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - for issue in issues - ] - return Response({"message": "success"}, status=status.HTTP_201_CREATED) - - # create multiple module inside an issue - def create_issue_modules(self, request, slug, project_id, issue_id): - modules = request.data.get("modules", []) - if not modules: - return Response( - {"error": "Modules are required"}, - status=status.HTTP_400_BAD_REQUEST, ) - - project = Project.objects.get(pk=project_id) - _ = ModuleIssue.objects.bulk_create( - [ - ModuleIssue( - issue_id=issue_id, - module_id=module, - project_id=project_id, - workspace_id=project.workspace_id, - created_by=request.user, - updated_by=request.user, - ) - for module in modules - ], - batch_size=10, - ignore_conflicts=True, + .order_by("-is_favorite", "-created_at") ) - # Bulk Update the activity - _ = [ - issue_activity.delay( - type="module.activity.created", - requested_data=json.dumps({"module_id": module}), - actor_id=str(request.user.id), - issue_id=issue_id, - project_id=project_id, - current_instance=None, - 
epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - for module in modules - ] - - return Response({"message": "success"}, status=status.HTTP_201_CREATED) - def destroy(self, request, slug, project_id, module_id, issue_id): - module_issue = ModuleIssue.objects.get( - workspace__slug=slug, - project_id=project_id, - module_id=module_id, - issue_id=issue_id, - ) - issue_activity.delay( - type="module.activity.deleted", - requested_data=json.dumps({"module_id": str(module_id)}), - actor_id=str(request.user.id), - issue_id=str(issue_id), - project_id=str(project_id), - current_instance=json.dumps( - {"module_name": module_issue.module.name} - ), - epoch=int(timezone.now().timestamp()), - notification=True, - origin=request.META.get("HTTP_ORIGIN"), - ) - module_issue.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class ModuleLinkViewSet(BaseViewSet): - permission_classes = [ - ProjectEntityPermission, - ] - - model = ModuleLink - serializer_class = ModuleLinkSerializer + def get(self, request, slug, project_id): + queryset = self.get_queryset() + modules = queryset.values( # Required fields + "id", + "workspace_id", + "project_id", + # Model fields + "name", + "description", + "description_text", + "description_html", + "start_date", + "target_date", + "status", + "lead_id", + "member_ids", + "view_props", + "sort_order", + "external_source", + "external_id", + # computed fields + "total_issues", + "is_favorite", + "cancelled_issues", + "completed_issues", + "started_issues", + "unstarted_issues", + "backlog_issues", + "created_at", + "updated_at", + "archived_at", + ) + return Response(modules, status=status.HTTP_200_OK) - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - module_id=self.kwargs.get("module_id"), + def post(self, request, slug, project_id, module_id): + module = Module.objects.get( + pk=module_id, project_id=project_id, 
workspace__slug=slug + ) + module.archived_at = timezone.now() + module.save() + return Response( + {"archived_at": str(module.archived_at)}, + status=status.HTTP_200_OK, ) - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(module_id=self.kwargs.get("module_id")) - .filter( - project__project_projectmember__member=self.request.user, - project__project_projectmember__is_active=True, - ) - .order_by("-created_at") - .distinct() + def delete(self, request, slug, project_id, module_id): + module = Module.objects.get( + pk=module_id, project_id=project_id, workspace__slug=slug ) + module.archived_at = None + module.save() + return Response(status=status.HTTP_204_NO_CONTENT) class ModuleFavoriteViewSet(BaseViewSet): diff --git a/apiserver/plane/app/views/module/issue.py b/apiserver/plane/app/views/module/issue.py new file mode 100644 index 00000000000..d2643334079 --- /dev/null +++ b/apiserver/plane/app/views/module/issue.py @@ -0,0 +1,260 @@ +# Python imports +import json + +# Django Imports +from django.utils import timezone +from django.db.models import F, OuterRef, Func, Q +from django.utils.decorators import method_decorator +from django.views.decorators.gzip import gzip_page +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db.models import Value, UUIDField +from django.db.models.functions import Coalesce + +# Third party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .. 
import BaseViewSet, WebhookMixin +from plane.app.serializers import ( + ModuleIssueSerializer, + IssueSerializer, +) +from plane.app.permissions import ProjectEntityPermission +from plane.db.models import ( + ModuleIssue, + Project, + Issue, + IssueLink, + IssueAttachment, +) +from plane.bgtasks.issue_activites_task import issue_activity +from plane.utils.issue_filters import issue_filters + + +class ModuleIssueViewSet(WebhookMixin, BaseViewSet): + serializer_class = ModuleIssueSerializer + model = ModuleIssue + webhook_event = "module_issue" + bulk = True + + filterset_fields = [ + "issue__labels__id", + "issue__assignees__id", + ] + + permission_classes = [ + ProjectEntityPermission, + ] + + def get_queryset(self): + return ( + Issue.issue_objects.filter( + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + issue_module__module_id=self.kwargs.get("module_id"), + ) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels", "issue_module__module") + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], 
output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + ).distinct() + + @method_decorator(gzip_page) + def list(self, request, slug, project_id, module_id): + fields = [ + field + for field in request.GET.get("fields", "").split(",") + if field + ] + filters = issue_filters(request.query_params, "GET") + issue_queryset = self.get_queryset().filter(**filters) + if self.fields or self.expand: + issues = IssueSerializer( + issue_queryset, many=True, fields=fields if fields else None + ).data + else: + issues = issue_queryset.values( + "id", + "name", + "state_id", + "sort_order", + "completed_at", + "estimate_point", + "priority", + "start_date", + "target_date", + "sequence_id", + "project_id", + "parent_id", + "cycle_id", + "module_ids", + "label_ids", + "assignee_ids", + "sub_issues_count", + "created_at", + "updated_at", + "created_by", + "updated_by", + "attachment_count", + "link_count", + "is_draft", + "archived_at", + ) + return Response(issues, status=status.HTTP_200_OK) + + # create multiple issues inside a module + def create_module_issues(self, request, slug, project_id, module_id): + issues = request.data.get("issues", []) + if not issues: + return Response( + {"error": "Issues are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + project = Project.objects.get(pk=project_id) + _ = ModuleIssue.objects.bulk_create( + [ + ModuleIssue( + issue_id=str(issue), + module_id=module_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for issue in issues + ], + batch_size=10, + ignore_conflicts=True, + ) + # Bulk Update the activity + _ = [ + issue_activity.delay( + type="module.activity.created", + requested_data=json.dumps({"module_id": str(module_id)}), + actor_id=str(request.user.id), + 
issue_id=str(issue), + project_id=project_id, + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + for issue in issues + ] + return Response({"message": "success"}, status=status.HTTP_201_CREATED) + + # create multiple module inside an issue + def create_issue_modules(self, request, slug, project_id, issue_id): + modules = request.data.get("modules", []) + if not modules: + return Response( + {"error": "Modules are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + project = Project.objects.get(pk=project_id) + _ = ModuleIssue.objects.bulk_create( + [ + ModuleIssue( + issue_id=issue_id, + module_id=module, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for module in modules + ], + batch_size=10, + ignore_conflicts=True, + ) + # Bulk Update the activity + _ = [ + issue_activity.delay( + type="module.activity.created", + requested_data=json.dumps({"module_id": module}), + actor_id=str(request.user.id), + issue_id=issue_id, + project_id=project_id, + current_instance=None, + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + for module in modules + ] + + return Response({"message": "success"}, status=status.HTTP_201_CREATED) + + def destroy(self, request, slug, project_id, module_id, issue_id): + module_issue = ModuleIssue.objects.get( + workspace__slug=slug, + project_id=project_id, + module_id=module_id, + issue_id=issue_id, + ) + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps({"module_id": str(module_id)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=json.dumps( + {"module_name": module_issue.module.name} + ), + epoch=int(timezone.now().timestamp()), + notification=True, + origin=request.META.get("HTTP_ORIGIN"), + ) + module_issue.delete() + return 
Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/notification.py b/apiserver/plane/app/views/notification/base.py similarity index 98% rename from apiserver/plane/app/views/notification.py rename to apiserver/plane/app/views/notification/base.py index ebe8e508220..8dae618dbcf 100644 --- a/apiserver/plane/app/views/notification.py +++ b/apiserver/plane/app/views/notification/base.py @@ -8,7 +8,7 @@ from plane.utils.paginator import BasePaginator # Module imports -from .base import BaseViewSet, BaseAPIView +from ..base import BaseViewSet, BaseAPIView from plane.db.models import ( Notification, IssueAssignee, @@ -17,7 +17,10 @@ WorkspaceMember, UserNotificationPreference, ) -from plane.app.serializers import NotificationSerializer, UserNotificationPreferenceSerializer +from plane.app.serializers import ( + NotificationSerializer, + UserNotificationPreferenceSerializer, +) class NotificationViewSet(BaseViewSet, BasePaginator): diff --git a/apiserver/plane/app/views/oauth.py b/apiserver/plane/app/views/oauth.py index 8152fb0eee4..48630175ab3 100644 --- a/apiserver/plane/app/views/oauth.py +++ b/apiserver/plane/app/views/oauth.py @@ -5,7 +5,6 @@ # Django imports from django.utils import timezone -from django.conf import settings # Third Party modules from rest_framework.response import Response @@ -250,9 +249,11 @@ def post(self, request): [ WorkspaceMember( workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) @@ -266,9 +267,11 @@ def post(self, request): [ ProjectMember( workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 
10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) @@ -391,9 +394,11 @@ def post(self, request): [ WorkspaceMember( workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) @@ -407,9 +412,11 @@ def post(self, request): [ ProjectMember( workspace_id=project_member_invite.workspace_id, - role=project_member_invite.role - if project_member_invite.role in [5, 10, 15] - else 15, + role=( + project_member_invite.role + if project_member_invite.role in [5, 10, 15] + else 15 + ), member=user, created_by_id=project_member_invite.created_by_id, ) diff --git a/apiserver/plane/app/views/page.py b/apiserver/plane/app/views/page/base.py similarity index 96% rename from apiserver/plane/app/views/page.py rename to apiserver/plane/app/views/page/base.py index 7ecf22fa847..d60d78500ce 100644 --- a/apiserver/plane/app/views/page.py +++ b/apiserver/plane/app/views/page/base.py @@ -1,25 +1,32 @@ # Python imports -from datetime import date, datetime, timedelta +from datetime import datetime # Django imports from django.db import connection from django.db.models import Exists, OuterRef, Q -from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.gzip import gzip_page + # Third party imports from rest_framework import status from rest_framework.response import Response from plane.app.permissions import ProjectEntityPermission -from plane.app.serializers import (IssueLiteSerializer, PageFavoriteSerializer, - PageLogSerializer, PageSerializer, - SubPageSerializer) -from plane.db.models import (Issue, IssueActivity, IssueAssignee, Page, - PageFavorite, PageLog, ProjectMember) +from plane.app.serializers import ( + PageFavoriteSerializer, + 
PageLogSerializer, + PageSerializer, + SubPageSerializer, +) +from plane.db.models import ( + Page, + PageFavorite, + PageLog, + ProjectMember, +) # Module imports -from .base import BaseAPIView, BaseViewSet +from ..base import BaseAPIView, BaseViewSet def unarchive_archive_page_and_descendants(page_id, archived_at): @@ -63,6 +70,7 @@ def get_queryset(self): .filter( project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, ) .filter(parent__isnull=True) .filter(Q(owned_by=self.request.user) | Q(access=0)) diff --git a/apiserver/plane/app/views/project.py b/apiserver/plane/app/views/project.py deleted file mode 100644 index 6f9b2618e19..00000000000 --- a/apiserver/plane/app/views/project.py +++ /dev/null @@ -1,1139 +0,0 @@ -# Python imports -import jwt -import boto3 -from datetime import datetime - -# Django imports -from django.core.exceptions import ValidationError -from django.db import IntegrityError -from django.db.models import ( - Prefetch, - Q, - Exists, - OuterRef, - F, - Func, - Subquery, -) -from django.core.validators import validate_email -from django.conf import settings -from django.utils import timezone - -# Third Party imports -from rest_framework.response import Response -from rest_framework import status -from rest_framework import serializers -from rest_framework.permissions import AllowAny - -# Module imports -from .base import BaseViewSet, BaseAPIView, WebhookMixin -from plane.app.serializers import ( - ProjectSerializer, - ProjectListSerializer, - ProjectMemberSerializer, - ProjectDetailSerializer, - ProjectMemberInviteSerializer, - ProjectFavoriteSerializer, - ProjectDeployBoardSerializer, - ProjectMemberAdminSerializer, - ProjectMemberRoleSerializer, -) - -from plane.app.permissions import ( - WorkspaceUserPermission, - ProjectBasePermission, - ProjectMemberPermission, - ProjectLitePermission, -) - -from plane.db.models import ( - Project, - ProjectMember, 
- Workspace, - ProjectMemberInvite, - User, - WorkspaceMember, - State, - TeamMember, - ProjectFavorite, - ProjectIdentifier, - Module, - Cycle, - Inbox, - ProjectDeployBoard, - IssueProperty, -) - -from plane.bgtasks.project_invitation_task import project_invitation - - -class ProjectViewSet(WebhookMixin, BaseViewSet): - serializer_class = ProjectListSerializer - model = Project - webhook_event = "project" - - permission_classes = [ - ProjectBasePermission, - ] - - def get_queryset(self): - sort_order = ProjectMember.objects.filter( - member=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - is_active=True, - ).values("sort_order") - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter( - Q(project_projectmember__member=self.request.user) - | Q(network=2) - ) - .select_related( - "workspace", - "workspace__owner", - "default_assignee", - "project_lead", - ) - .annotate( - is_favorite=Exists( - ProjectFavorite.objects.filter( - user=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ) - ) - ) - .annotate( - is_member=Exists( - ProjectMember.objects.filter( - member=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - is_active=True, - ) - ) - ) - .annotate( - total_members=ProjectMember.objects.filter( - project_id=OuterRef("id"), - member__is_bot=False, - is_active=True, - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - total_modules=Module.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - member_role=ProjectMember.objects.filter( - project_id=OuterRef("pk"), - 
member_id=self.request.user.id, - is_active=True, - ).values("role") - ) - .annotate( - is_deployed=Exists( - ProjectDeployBoard.objects.filter( - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ) - ) - ) - .annotate(sort_order=Subquery(sort_order)) - .prefetch_related( - Prefetch( - "project_projectmember", - queryset=ProjectMember.objects.filter( - workspace__slug=self.kwargs.get("slug"), - is_active=True, - ).select_related("member"), - to_attr="members_list", - ) - ) - .distinct() - ) - - def list(self, request, slug): - fields = [ - field - for field in request.GET.get("fields", "").split(",") - if field - ] - projects = ( - self.get_queryset() - .order_by("sort_order", "name") - ) - if request.GET.get("per_page", False) and request.GET.get( - "cursor", False - ): - return self.paginate( - request=request, - queryset=(projects), - on_results=lambda projects: ProjectListSerializer( - projects, many=True - ).data, - ) - projects = ProjectListSerializer( - projects, many=True, fields=fields if fields else None - ).data - return Response(projects, status=status.HTTP_200_OK) - - def create(self, request, slug): - try: - workspace = Workspace.objects.get(slug=slug) - - serializer = ProjectSerializer( - data={**request.data}, context={"workspace_id": workspace.id} - ) - if serializer.is_valid(): - serializer.save() - - # Add the user as Administrator to the project - _ = ProjectMember.objects.create( - project_id=serializer.data["id"], - member=request.user, - role=20, - ) - # Also create the issue property for the user - _ = IssueProperty.objects.create( - project_id=serializer.data["id"], - user=request.user, - ) - - if serializer.data["project_lead"] is not None and str( - serializer.data["project_lead"] - ) != str(request.user.id): - ProjectMember.objects.create( - project_id=serializer.data["id"], - member_id=serializer.data["project_lead"], - role=20, - ) - # Also create the issue property for the user - IssueProperty.objects.create( - 
project_id=serializer.data["id"], - user_id=serializer.data["project_lead"], - ) - - # Default states - states = [ - { - "name": "Backlog", - "color": "#A3A3A3", - "sequence": 15000, - "group": "backlog", - "default": True, - }, - { - "name": "Todo", - "color": "#3A3A3A", - "sequence": 25000, - "group": "unstarted", - }, - { - "name": "In Progress", - "color": "#F59E0B", - "sequence": 35000, - "group": "started", - }, - { - "name": "Done", - "color": "#16A34A", - "sequence": 45000, - "group": "completed", - }, - { - "name": "Cancelled", - "color": "#EF4444", - "sequence": 55000, - "group": "cancelled", - }, - ] - - State.objects.bulk_create( - [ - State( - name=state["name"], - color=state["color"], - project=serializer.instance, - sequence=state["sequence"], - workspace=serializer.instance.workspace, - group=state["group"], - default=state.get("default", False), - created_by=request.user, - ) - for state in states - ] - ) - - project = ( - self.get_queryset() - .filter(pk=serializer.data["id"]) - .first() - ) - serializer = ProjectListSerializer(project) - return Response( - serializer.data, status=status.HTTP_201_CREATED - ) - return Response( - serializer.errors, - status=status.HTTP_400_BAD_REQUEST, - ) - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"name": "The project name is already taken"}, - status=status.HTTP_410_GONE, - ) - except Workspace.DoesNotExist as e: - return Response( - {"error": "Workspace does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except serializers.ValidationError as e: - return Response( - {"identifier": "The project identifier is already taken"}, - status=status.HTTP_410_GONE, - ) - - def partial_update(self, request, slug, pk=None): - try: - workspace = Workspace.objects.get(slug=slug) - - project = Project.objects.get(pk=pk) - - serializer = ProjectSerializer( - project, - data={**request.data}, - context={"workspace_id": workspace.id}, - partial=True, - ) - - if 
serializer.is_valid(): - serializer.save() - if serializer.data["inbox_view"]: - Inbox.objects.get_or_create( - name=f"{project.name} Inbox", - project=project, - is_default=True, - ) - - # Create the triage state in Backlog group - State.objects.get_or_create( - name="Triage", - group="backlog", - description="Default state for managing all Inbox Issues", - project_id=pk, - color="#ff7700", - ) - - project = ( - self.get_queryset() - .filter(pk=serializer.data["id"]) - .first() - ) - serializer = ProjectListSerializer(project) - return Response(serializer.data, status=status.HTTP_200_OK) - return Response( - serializer.errors, status=status.HTTP_400_BAD_REQUEST - ) - - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"name": "The project name is already taken"}, - status=status.HTTP_410_GONE, - ) - except (Project.DoesNotExist, Workspace.DoesNotExist): - return Response( - {"error": "Project does not exist"}, - status=status.HTTP_404_NOT_FOUND, - ) - except serializers.ValidationError as e: - return Response( - {"identifier": "The project identifier is already taken"}, - status=status.HTTP_410_GONE, - ) - - -class ProjectInvitationsViewset(BaseViewSet): - serializer_class = ProjectMemberInviteSerializer - model = ProjectMemberInvite - - search_fields = [] - - permission_classes = [ - ProjectBasePermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .select_related("project") - .select_related("workspace", "workspace__owner") - ) - - def create(self, request, slug, project_id): - emails = request.data.get("emails", []) - - # Check if email is provided - if not emails: - return Response( - {"error": "Emails are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - requesting_user = ProjectMember.objects.get( - workspace__slug=slug, - project_id=project_id, - 
member_id=request.user.id, - ) - - # Check if any invited user has an higher role - if len( - [ - email - for email in emails - if int(email.get("role", 10)) > requesting_user.role - ] - ): - return Response( - {"error": "You cannot invite a user with higher role"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.get(slug=slug) - - project_invitations = [] - for email in emails: - try: - validate_email(email.get("email")) - project_invitations.append( - ProjectMemberInvite( - email=email.get("email").strip().lower(), - project_id=project_id, - workspace_id=workspace.id, - token=jwt.encode( - { - "email": email, - "timestamp": datetime.now().timestamp(), - }, - settings.SECRET_KEY, - algorithm="HS256", - ), - role=email.get("role", 10), - created_by=request.user, - ) - ) - except ValidationError: - return Response( - { - "error": f"Invalid email - {email} provided a valid email address is required to send the invite" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Create workspace member invite - project_invitations = ProjectMemberInvite.objects.bulk_create( - project_invitations, batch_size=10, ignore_conflicts=True - ) - current_site = request.META.get("HTTP_ORIGIN") - - # Send invitations - for invitation in project_invitations: - project_invitations.delay( - invitation.email, - project_id, - invitation.token, - current_site, - request.user.email, - ) - - return Response( - { - "message": "Email sent successfully", - }, - status=status.HTTP_200_OK, - ) - - -class UserProjectInvitationsViewset(BaseViewSet): - serializer_class = ProjectMemberInviteSerializer - model = ProjectMemberInvite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(email=self.request.user.email) - .select_related("workspace", "workspace__owner", "project") - ) - - def create(self, request, slug): - project_ids = request.data.get("project_ids", []) - - # Get the workspace user role - workspace_member = 
WorkspaceMember.objects.get( - member=request.user, - workspace__slug=slug, - is_active=True, - ) - - workspace_role = workspace_member.role - workspace = workspace_member.workspace - - # If the user was already part of workspace - _ = ProjectMember.objects.filter( - workspace__slug=slug, - project_id__in=project_ids, - member=request.user, - ).update(is_active=True) - - ProjectMember.objects.bulk_create( - [ - ProjectMember( - project_id=project_id, - member=request.user, - role=15 if workspace_role >= 15 else 10, - workspace=workspace, - created_by=request.user, - ) - for project_id in project_ids - ], - ignore_conflicts=True, - ) - - IssueProperty.objects.bulk_create( - [ - IssueProperty( - project_id=project_id, - user=request.user, - workspace=workspace, - created_by=request.user, - ) - for project_id in project_ids - ], - ignore_conflicts=True, - ) - - return Response( - {"message": "Projects joined successfully"}, - status=status.HTTP_201_CREATED, - ) - - -class ProjectJoinEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def post(self, request, slug, project_id, pk): - project_invite = ProjectMemberInvite.objects.get( - pk=pk, - project_id=project_id, - workspace__slug=slug, - ) - - email = request.data.get("email", "") - - if email == "" or project_invite.email != email: - return Response( - {"error": "You do not have permission to join the project"}, - status=status.HTTP_403_FORBIDDEN, - ) - - if project_invite.responded_at is None: - project_invite.accepted = request.data.get("accepted", False) - project_invite.responded_at = timezone.now() - project_invite.save() - - if project_invite.accepted: - # Check if the user account exists - user = User.objects.filter(email=email).first() - - # Check if user is a part of workspace - workspace_member = WorkspaceMember.objects.filter( - workspace__slug=slug, member=user - ).first() - # Add him to workspace - if workspace_member is None: - _ = WorkspaceMember.objects.create( - 
workspace_id=project_invite.workspace_id, - member=user, - role=15 - if project_invite.role >= 15 - else project_invite.role, - ) - else: - # Else make him active - workspace_member.is_active = True - workspace_member.save() - - # Check if the user was already a member of project then activate the user - project_member = ProjectMember.objects.filter( - workspace_id=project_invite.workspace_id, member=user - ).first() - if project_member is None: - # Create a Project Member - _ = ProjectMember.objects.create( - workspace_id=project_invite.workspace_id, - member=user, - role=project_invite.role, - ) - else: - project_member.is_active = True - project_member.role = project_member.role - project_member.save() - - return Response( - {"message": "Project Invitation Accepted"}, - status=status.HTTP_200_OK, - ) - - return Response( - {"message": "Project Invitation was not accepted"}, - status=status.HTTP_200_OK, - ) - - return Response( - {"error": "You have already responded to the invitation request"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get(self, request, slug, project_id, pk): - project_invitation = ProjectMemberInvite.objects.get( - workspace__slug=slug, project_id=project_id, pk=pk - ) - serializer = ProjectMemberInviteSerializer(project_invitation) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class ProjectMemberViewSet(BaseViewSet): - serializer_class = ProjectMemberAdminSerializer - model = ProjectMember - permission_classes = [ - ProjectMemberPermission, - ] - - def get_permissions(self): - if self.action == "leave": - self.permission_classes = [ - ProjectLitePermission, - ] - else: - self.permission_classes = [ - ProjectMemberPermission, - ] - - return super(ProjectMemberViewSet, self).get_permissions() - - search_fields = [ - "member__display_name", - "member__first_name", - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - 
.filter(project_id=self.kwargs.get("project_id")) - .filter(member__is_bot=False) - .filter() - .select_related("project") - .select_related("member") - .select_related("workspace", "workspace__owner") - ) - - def create(self, request, slug, project_id): - members = request.data.get("members", []) - - # get the project - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - if not len(members): - return Response( - {"error": "Atleast one member is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - bulk_project_members = [] - bulk_issue_props = [] - - project_members = ( - ProjectMember.objects.filter( - workspace__slug=slug, - member_id__in=[member.get("member_id") for member in members], - ) - .values("member_id", "sort_order") - .order_by("sort_order") - ) - - bulk_project_members = [] - member_roles = {member.get("member_id"): member.get("role") for member in members} - # Update roles in the members array based on the member_roles dictionary - for project_member in ProjectMember.objects.filter(project_id=project_id, member_id__in=[member.get("member_id") for member in members]): - project_member.role = member_roles[str(project_member.member_id)] - project_member.is_active = True - bulk_project_members.append(project_member) - - # Update the roles of the existing members - ProjectMember.objects.bulk_update( - bulk_project_members, ["is_active", "role"], batch_size=100 - ) - - for member in members: - sort_order = [ - project_member.get("sort_order") - for project_member in project_members - if str(project_member.get("member_id")) - == str(member.get("member_id")) - ] - bulk_project_members.append( - ProjectMember( - member_id=member.get("member_id"), - role=member.get("role", 10), - project_id=project_id, - workspace_id=project.workspace_id, - sort_order=sort_order[0] - 10000 - if len(sort_order) - else 65535, - ) - ) - bulk_issue_props.append( - IssueProperty( - user_id=member.get("member_id"), - project_id=project_id, - 
workspace_id=project.workspace_id, - ) - ) - - project_members = ProjectMember.objects.bulk_create( - bulk_project_members, - batch_size=10, - ignore_conflicts=True, - ) - - _ = IssueProperty.objects.bulk_create( - bulk_issue_props, batch_size=10, ignore_conflicts=True - ) - - project_members = ProjectMember.objects.filter(project_id=project_id, member_id__in=[member.get("member_id") for member in members]) - serializer = ProjectMemberRoleSerializer(project_members, many=True) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - def list(self, request, slug, project_id): - # Get the list of project members for the project - project_members = ProjectMember.objects.filter( - project_id=project_id, - workspace__slug=slug, - member__is_bot=False, - is_active=True, - ).select_related("project", "member", "workspace") - - serializer = ProjectMemberRoleSerializer( - project_members, fields=("id", "member", "role"), many=True - ) - return Response(serializer.data, status=status.HTTP_200_OK) - - def partial_update(self, request, slug, project_id, pk): - project_member = ProjectMember.objects.get( - pk=pk, - workspace__slug=slug, - project_id=project_id, - is_active=True, - ) - if request.user.id == project_member.member_id: - return Response( - {"error": "You cannot update your own role"}, - status=status.HTTP_400_BAD_REQUEST, - ) - # Check while updating user roles - requested_project_member = ProjectMember.objects.get( - project_id=project_id, - workspace__slug=slug, - member=request.user, - is_active=True, - ) - if ( - "role" in request.data - and int(request.data.get("role", project_member.role)) - > requested_project_member.role - ): - return Response( - { - "error": "You cannot update a role that is higher than your own role" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = ProjectMemberSerializer( - project_member, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, 
status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, project_id, pk): - project_member = ProjectMember.objects.get( - workspace__slug=slug, - project_id=project_id, - pk=pk, - member__is_bot=False, - is_active=True, - ) - # check requesting user role - requesting_project_member = ProjectMember.objects.get( - workspace__slug=slug, - member=request.user, - project_id=project_id, - is_active=True, - ) - # User cannot remove himself - if str(project_member.id) == str(requesting_project_member.id): - return Response( - { - "error": "You cannot remove yourself from the workspace. Please use leave workspace" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - # User cannot deactivate higher role - if requesting_project_member.role < project_member.role: - return Response( - { - "error": "You cannot remove a user having role higher than you" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - project_member.is_active = False - project_member.save() - return Response(status=status.HTTP_204_NO_CONTENT) - - def leave(self, request, slug, project_id): - project_member = ProjectMember.objects.get( - workspace__slug=slug, - project_id=project_id, - member=request.user, - is_active=True, - ) - - # Check if the leaving user is the only admin of the project - if ( - project_member.role == 20 - and not ProjectMember.objects.filter( - workspace__slug=slug, - project_id=project_id, - role=20, - is_active=True, - ).count() - > 1 - ): - return Response( - { - "error": "You cannot leave the project as your the only admin of the project you will have to either delete the project or create an another admin", - }, - status=status.HTTP_400_BAD_REQUEST, - ) - # Deactivate the user - project_member.is_active = False - project_member.save() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class AddTeamToProjectEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def post(self, 
request, slug, project_id): - team_members = TeamMember.objects.filter( - workspace__slug=slug, team__in=request.data.get("teams", []) - ).values_list("member", flat=True) - - if len(team_members) == 0: - return Response( - {"error": "No such team exists"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.get(slug=slug) - - project_members = [] - issue_props = [] - for member in team_members: - project_members.append( - ProjectMember( - project_id=project_id, - member_id=member, - workspace=workspace, - created_by=request.user, - ) - ) - issue_props.append( - IssueProperty( - project_id=project_id, - user_id=member, - workspace=workspace, - created_by=request.user, - ) - ) - - ProjectMember.objects.bulk_create( - project_members, batch_size=10, ignore_conflicts=True - ) - - _ = IssueProperty.objects.bulk_create( - issue_props, batch_size=10, ignore_conflicts=True - ) - - serializer = ProjectMemberSerializer(project_members, many=True) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - -class ProjectIdentifierEndpoint(BaseAPIView): - permission_classes = [ - ProjectBasePermission, - ] - - def get(self, request, slug): - name = request.GET.get("name", "").strip().upper() - - if name == "": - return Response( - {"error": "Name is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - exists = ProjectIdentifier.objects.filter( - name=name, workspace__slug=slug - ).values("id", "name", "project") - - return Response( - {"exists": len(exists), "identifiers": exists}, - status=status.HTTP_200_OK, - ) - - def delete(self, request, slug): - name = request.data.get("name", "").strip().upper() - - if name == "": - return Response( - {"error": "Name is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if Project.objects.filter( - identifier=name, workspace__slug=slug - ).exists(): - return Response( - { - "error": "Cannot delete an identifier of an existing project" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - 
ProjectIdentifier.objects.filter( - name=name, workspace__slug=slug - ).delete() - - return Response( - status=status.HTTP_204_NO_CONTENT, - ) - - -class ProjectUserViewsEndpoint(BaseAPIView): - def post(self, request, slug, project_id): - project = Project.objects.get(pk=project_id, workspace__slug=slug) - - project_member = ProjectMember.objects.filter( - member=request.user, - project=project, - is_active=True, - ).first() - - if project_member is None: - return Response( - {"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN - ) - - view_props = project_member.view_props - default_props = project_member.default_props - preferences = project_member.preferences - sort_order = project_member.sort_order - - project_member.view_props = request.data.get("view_props", view_props) - project_member.default_props = request.data.get( - "default_props", default_props - ) - project_member.preferences = request.data.get( - "preferences", preferences - ) - project_member.sort_order = request.data.get("sort_order", sort_order) - - project_member.save() - - return Response(status=status.HTTP_204_NO_CONTENT) - - -class ProjectMemberUserEndpoint(BaseAPIView): - def get(self, request, slug, project_id): - project_member = ProjectMember.objects.get( - project_id=project_id, - workspace__slug=slug, - member=request.user, - is_active=True, - ) - serializer = ProjectMemberSerializer(project_member) - - return Response(serializer.data, status=status.HTTP_200_OK) - - -class ProjectFavoritesViewSet(BaseViewSet): - serializer_class = ProjectFavoriteSerializer - model = ProjectFavorite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(user=self.request.user) - .select_related( - "project", "project__project_lead", "project__default_assignee" - ) - .select_related("workspace", "workspace__owner") - ) - - def perform_create(self, serializer): - serializer.save(user=self.request.user) - - def 
create(self, request, slug): - serializer = ProjectFavoriteSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(user=request.user) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, project_id): - project_favorite = ProjectFavorite.objects.get( - project=project_id, user=request.user, workspace__slug=slug - ) - project_favorite.delete() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class ProjectPublicCoverImagesEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - - def get(self, request): - files = [] - s3 = boto3.client( - "s3", - aws_access_key_id=settings.AWS_ACCESS_KEY_ID, - aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, - ) - params = { - "Bucket": settings.AWS_STORAGE_BUCKET_NAME, - "Prefix": "static/project-cover/", - } - - response = s3.list_objects_v2(**params) - # Extracting file keys from the response - if "Contents" in response: - for content in response["Contents"]: - if not content["Key"].endswith( - "/" - ): # This line ensures we're only getting files, not "sub-folders" - files.append( - f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}" - ) - - return Response(files, status=status.HTTP_200_OK) - - -class ProjectDeployBoardViewSet(BaseViewSet): - permission_classes = [ - ProjectMemberPermission, - ] - serializer_class = ProjectDeployBoardSerializer - model = ProjectDeployBoard - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter( - workspace__slug=self.kwargs.get("slug"), - project_id=self.kwargs.get("project_id"), - ) - .select_related("project") - ) - - def create(self, request, slug, project_id): - comments = request.data.get("comments", False) - reactions = request.data.get("reactions", False) - inbox = request.data.get("inbox", None) - votes = request.data.get("votes", False) - views = 
request.data.get( - "views", - { - "list": True, - "kanban": True, - "calendar": True, - "gantt": True, - "spreadsheet": True, - }, - ) - - project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create( - anchor=f"{slug}/{project_id}", - project_id=project_id, - ) - project_deploy_board.comments = comments - project_deploy_board.reactions = reactions - project_deploy_board.inbox = inbox - project_deploy_board.votes = votes - project_deploy_board.views = views - - project_deploy_board.save() - - serializer = ProjectDeployBoardSerializer(project_deploy_board) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class UserProjectRolesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceUserPermission, - ] - - def get(self, request, slug): - project_members = ProjectMember.objects.filter( - workspace__slug=slug, - member_id=request.user.id, - ).values("project_id", "role") - - project_members = { - str(member["project_id"]): member["role"] - for member in project_members - } - return Response(project_members, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/project/base.py b/apiserver/plane/app/views/project/base.py new file mode 100644 index 00000000000..1672cd47ca0 --- /dev/null +++ b/apiserver/plane/app/views/project/base.py @@ -0,0 +1,652 @@ +# Python imports +import boto3 + +# Django imports +from django.db import IntegrityError +from django.db.models import ( + Prefetch, + Q, + Exists, + OuterRef, + F, + Func, + Subquery, +) +from django.conf import settings +from django.utils import timezone + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from rest_framework import serializers +from rest_framework.permissions import AllowAny + +# Module imports +from plane.app.views.base import BaseViewSet, BaseAPIView, WebhookMixin +from plane.app.serializers import ( + ProjectSerializer, + ProjectListSerializer, + ProjectFavoriteSerializer, + ProjectDeployBoardSerializer, +) + 
+from plane.app.permissions import ( + ProjectBasePermission, + ProjectMemberPermission, +) + +from plane.db.models import ( + Project, + ProjectMember, + Workspace, + State, + ProjectFavorite, + ProjectIdentifier, + Module, + Cycle, + Inbox, + ProjectDeployBoard, + IssueProperty, + Issue, +) +from plane.utils.cache import cache_response + + +class ProjectViewSet(WebhookMixin, BaseViewSet): + serializer_class = ProjectListSerializer + model = Project + webhook_event = "project" + + permission_classes = [ + ProjectBasePermission, + ] + + def get_queryset(self): + sort_order = ProjectMember.objects.filter( + member=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + is_active=True, + ).values("sort_order") + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter( + Q( + project_projectmember__member=self.request.user, + project_projectmember__is_active=True, + ) + | Q(network=2) + ) + .select_related( + "workspace", + "workspace__owner", + "default_assignee", + "project_lead", + ) + .annotate( + is_favorite=Exists( + ProjectFavorite.objects.filter( + user=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) + .annotate( + is_member=Exists( + ProjectMember.objects.filter( + member=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + is_active=True, + ) + ) + ) + .annotate( + total_members=ProjectMember.objects.filter( + project_id=OuterRef("id"), + member__is_bot=False, + is_active=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + total_modules=Module.objects.filter(project_id=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), 
function="Count")) + .values("count") + ) + .annotate( + member_role=ProjectMember.objects.filter( + project_id=OuterRef("pk"), + member_id=self.request.user.id, + is_active=True, + ).values("role") + ) + .annotate( + is_deployed=Exists( + ProjectDeployBoard.objects.filter( + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) + .annotate(sort_order=Subquery(sort_order)) + .prefetch_related( + Prefetch( + "project_projectmember", + queryset=ProjectMember.objects.filter( + workspace__slug=self.kwargs.get("slug"), + is_active=True, + ).select_related("member"), + to_attr="members_list", + ) + ) + .distinct() + ) + + def list(self, request, slug): + fields = [ + field + for field in request.GET.get("fields", "").split(",") + if field + ] + projects = self.get_queryset().order_by("sort_order", "name") + if request.GET.get("per_page", False) and request.GET.get( + "cursor", False + ): + return self.paginate( + request=request, + queryset=(projects), + on_results=lambda projects: ProjectListSerializer( + projects, many=True + ).data, + ) + projects = ProjectListSerializer( + projects, many=True, fields=fields if fields else None + ).data + return Response(projects, status=status.HTTP_200_OK) + + def retrieve(self, request, slug, pk): + project = ( + self.get_queryset() + .filter(archived_at__isnull=True) + .filter(pk=pk) + .annotate( + total_issues=Issue.issue_objects.filter( + project_id=self.kwargs.get("pk"), + parent__isnull=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues=Issue.issue_objects.filter( + project_id=self.kwargs.get("pk"), + parent__isnull=False, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + archived_issues=Issue.objects.filter( + project_id=self.kwargs.get("pk"), + archived_at__isnull=False, + parent__isnull=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + 
.values("count") + ) + .annotate( + archived_sub_issues=Issue.objects.filter( + project_id=self.kwargs.get("pk"), + archived_at__isnull=False, + parent__isnull=False, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + draft_issues=Issue.objects.filter( + project_id=self.kwargs.get("pk"), + is_draft=True, + parent__isnull=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + draft_sub_issues=Issue.objects.filter( + project_id=self.kwargs.get("pk"), + is_draft=True, + parent__isnull=False, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ).first() + + serializer = ProjectListSerializer(project) + return Response(serializer.data, status=status.HTTP_200_OK) + + def create(self, request, slug): + try: + workspace = Workspace.objects.get(slug=slug) + + serializer = ProjectSerializer( + data={**request.data}, context={"workspace_id": workspace.id} + ) + if serializer.is_valid(): + serializer.save() + + # Add the user as Administrator to the project + _ = ProjectMember.objects.create( + project_id=serializer.data["id"], + member=request.user, + role=20, + ) + # Also create the issue property for the user + _ = IssueProperty.objects.create( + project_id=serializer.data["id"], + user=request.user, + ) + + if serializer.data["project_lead"] is not None and str( + serializer.data["project_lead"] + ) != str(request.user.id): + ProjectMember.objects.create( + project_id=serializer.data["id"], + member_id=serializer.data["project_lead"], + role=20, + ) + # Also create the issue property for the user + IssueProperty.objects.create( + project_id=serializer.data["id"], + user_id=serializer.data["project_lead"], + ) + + # Default states + states = [ + { + "name": "Backlog", + "color": "#A3A3A3", + "sequence": 15000, + "group": "backlog", + "default": True, + }, + { + "name": "Todo", + "color": "#3A3A3A", + "sequence": 25000, + 
"group": "unstarted", + }, + { + "name": "In Progress", + "color": "#F59E0B", + "sequence": 35000, + "group": "started", + }, + { + "name": "Done", + "color": "#16A34A", + "sequence": 45000, + "group": "completed", + }, + { + "name": "Cancelled", + "color": "#EF4444", + "sequence": 55000, + "group": "cancelled", + }, + ] + + State.objects.bulk_create( + [ + State( + name=state["name"], + color=state["color"], + project=serializer.instance, + sequence=state["sequence"], + workspace=serializer.instance.workspace, + group=state["group"], + default=state.get("default", False), + created_by=request.user, + ) + for state in states + ] + ) + + project = ( + self.get_queryset() + .filter(pk=serializer.data["id"]) + .first() + ) + serializer = ProjectListSerializer(project) + return Response( + serializer.data, status=status.HTTP_201_CREATED + ) + return Response( + serializer.errors, + status=status.HTTP_400_BAD_REQUEST, + ) + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"name": "The project name is already taken"}, + status=status.HTTP_410_GONE, + ) + except Workspace.DoesNotExist: + return Response( + {"error": "Workspace does not exist"}, + status=status.HTTP_404_NOT_FOUND, + ) + except serializers.ValidationError: + return Response( + {"identifier": "The project identifier is already taken"}, + status=status.HTTP_410_GONE, + ) + + def partial_update(self, request, slug, pk=None): + try: + workspace = Workspace.objects.get(slug=slug) + + project = Project.objects.get(pk=pk) + + if project.archived_at: + return Response( + {"error": "Archived projects cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = ProjectSerializer( + project, + data={**request.data}, + context={"workspace_id": workspace.id}, + partial=True, + ) + + if serializer.is_valid(): + serializer.save() + if serializer.data["inbox_view"]: + Inbox.objects.get_or_create( + name=f"{project.name} Inbox", + project=project, + is_default=True, + ) + 
+ # Create the triage state in Backlog group + State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + project_id=pk, + color="#ff7700", + ) + + project = ( + self.get_queryset() + .filter(pk=serializer.data["id"]) + .first() + ) + serializer = ProjectListSerializer(project) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response( + serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) + + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"name": "The project name is already taken"}, + status=status.HTTP_410_GONE, + ) + except (Project.DoesNotExist, Workspace.DoesNotExist): + return Response( + {"error": "Project does not exist"}, + status=status.HTTP_404_NOT_FOUND, + ) + except serializers.ValidationError: + return Response( + {"identifier": "The project identifier is already taken"}, + status=status.HTTP_410_GONE, + ) + + +class ProjectArchiveUnarchiveEndpoint(BaseAPIView): + + permission_classes = [ + ProjectBasePermission, + ] + + def post(self, request, slug, project_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + project.archived_at = timezone.now() + project.save() + return Response( + {"archived_at": str(project.archived_at)}, + status=status.HTTP_200_OK, + ) + + def delete(self, request, slug, project_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + project.archived_at = None + project.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ProjectIdentifierEndpoint(BaseAPIView): + permission_classes = [ + ProjectBasePermission, + ] + + def get(self, request, slug): + name = request.GET.get("name", "").strip().upper() + + if name == "": + return Response( + {"error": "Name is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + exists = ProjectIdentifier.objects.filter( + name=name, workspace__slug=slug + ).values("id", "name", "project") + + return 
Response( + {"exists": len(exists), "identifiers": exists}, + status=status.HTTP_200_OK, + ) + + def delete(self, request, slug): + name = request.data.get("name", "").strip().upper() + + if name == "": + return Response( + {"error": "Name is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if Project.objects.filter( + identifier=name, workspace__slug=slug + ).exists(): + return Response( + { + "error": "Cannot delete an identifier of an existing project" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + ProjectIdentifier.objects.filter( + name=name, workspace__slug=slug + ).delete() + + return Response( + status=status.HTTP_204_NO_CONTENT, + ) + + +class ProjectUserViewsEndpoint(BaseAPIView): + def post(self, request, slug, project_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + project_member = ProjectMember.objects.filter( + member=request.user, + project=project, + is_active=True, + ).first() + + if project_member is None: + return Response( + {"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN + ) + + view_props = project_member.view_props + default_props = project_member.default_props + preferences = project_member.preferences + sort_order = project_member.sort_order + + project_member.view_props = request.data.get("view_props", view_props) + project_member.default_props = request.data.get( + "default_props", default_props + ) + project_member.preferences = request.data.get( + "preferences", preferences + ) + project_member.sort_order = request.data.get("sort_order", sort_order) + + project_member.save() + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ProjectFavoritesViewSet(BaseViewSet): + serializer_class = ProjectFavoriteSerializer + model = ProjectFavorite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(user=self.request.user) + .select_related( + "project", "project__project_lead", 
"project__default_assignee" + ) + .select_related("workspace", "workspace__owner") + ) + + def perform_create(self, serializer): + serializer.save(user=self.request.user) + + def create(self, request, slug): + serializer = ProjectFavoriteSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(user=request.user) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id): + project_favorite = ProjectFavorite.objects.get( + project=project_id, user=request.user, workspace__slug=slug + ) + project_favorite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class ProjectPublicCoverImagesEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + # Cache the below api for 24 hours + @cache_response(60 * 60 * 24, user=False) + def get(self, request): + files = [] + s3 = boto3.client( + "s3", + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + ) + params = { + "Bucket": settings.AWS_STORAGE_BUCKET_NAME, + "Prefix": "static/project-cover/", + } + + response = s3.list_objects_v2(**params) + # Extracting file keys from the response + if "Contents" in response: + for content in response["Contents"]: + if not content["Key"].endswith( + "/" + ): # This line ensures we're only getting files, not "sub-folders" + files.append( + f"https://{settings.AWS_STORAGE_BUCKET_NAME}.s3.{settings.AWS_REGION}.amazonaws.com/{content['Key']}" + ) + + return Response(files, status=status.HTTP_200_OK) + + +class ProjectDeployBoardViewSet(BaseViewSet): + permission_classes = [ + ProjectMemberPermission, + ] + serializer_class = ProjectDeployBoardSerializer + model = ProjectDeployBoard + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + 
.select_related("project") + ) + + def create(self, request, slug, project_id): + comments = request.data.get("comments", False) + reactions = request.data.get("reactions", False) + inbox = request.data.get("inbox", None) + votes = request.data.get("votes", False) + views = request.data.get( + "views", + { + "list": True, + "kanban": True, + "calendar": True, + "gantt": True, + "spreadsheet": True, + }, + ) + + project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create( + anchor=f"{slug}/{project_id}", + project_id=project_id, + ) + project_deploy_board.comments = comments + project_deploy_board.reactions = reactions + project_deploy_board.inbox = inbox + project_deploy_board.votes = votes + project_deploy_board.views = views + + project_deploy_board.save() + + serializer = ProjectDeployBoardSerializer(project_deploy_board) + return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/project/invite.py b/apiserver/plane/app/views/project/invite.py new file mode 100644 index 00000000000..d199a877004 --- /dev/null +++ b/apiserver/plane/app/views/project/invite.py @@ -0,0 +1,286 @@ +# Python imports +import jwt +from datetime import datetime + +# Django imports +from django.core.exceptions import ValidationError +from django.core.validators import validate_email +from django.conf import settings +from django.utils import timezone + +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from rest_framework.permissions import AllowAny + +# Module imports +from .base import BaseViewSet, BaseAPIView +from plane.app.serializers import ProjectMemberInviteSerializer + +from plane.app.permissions import ProjectBasePermission + +from plane.db.models import ( + ProjectMember, + Workspace, + ProjectMemberInvite, + User, + WorkspaceMember, + IssueProperty, +) + + +class ProjectInvitationsViewset(BaseViewSet): + serializer_class = ProjectMemberInviteSerializer + model = 
ProjectMemberInvite + + search_fields = [] + + permission_classes = [ + ProjectBasePermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .select_related("project") + .select_related("workspace", "workspace__owner") + ) + + def create(self, request, slug, project_id): + emails = request.data.get("emails", []) + + # Check if email is provided + if not emails: + return Response( + {"error": "Emails are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + requesting_user = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member_id=request.user.id, + ) + + # Check if any invited user has an higher role + if len( + [ + email + for email in emails + if int(email.get("role", 10)) > requesting_user.role + ] + ): + return Response( + {"error": "You cannot invite a user with higher role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + project_invitations = [] + for email in emails: + try: + validate_email(email.get("email")) + project_invitations.append( + ProjectMemberInvite( + email=email.get("email").strip().lower(), + project_id=project_id, + workspace_id=workspace.id, + token=jwt.encode( + { + "email": email, + "timestamp": datetime.now().timestamp(), + }, + settings.SECRET_KEY, + algorithm="HS256", + ), + role=email.get("role", 10), + created_by=request.user, + ) + ) + except ValidationError: + return Response( + { + "error": f"Invalid email - {email} provided a valid email address is required to send the invite" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Create workspace member invite + project_invitations = ProjectMemberInvite.objects.bulk_create( + project_invitations, batch_size=10, ignore_conflicts=True + ) + current_site = request.META.get("HTTP_ORIGIN") + + # Send invitations + for invitation in project_invitations: + 
project_invitations.delay( + invitation.email, + project_id, + invitation.token, + current_site, + request.user.email, + ) + + return Response( + { + "message": "Email sent successfully", + }, + status=status.HTTP_200_OK, + ) + + +class UserProjectInvitationsViewset(BaseViewSet): + serializer_class = ProjectMemberInviteSerializer + model = ProjectMemberInvite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(email=self.request.user.email) + .select_related("workspace", "workspace__owner", "project") + ) + + def create(self, request, slug): + project_ids = request.data.get("project_ids", []) + + # Get the workspace user role + workspace_member = WorkspaceMember.objects.get( + member=request.user, + workspace__slug=slug, + is_active=True, + ) + + workspace_role = workspace_member.role + workspace = workspace_member.workspace + + # If the user was already part of workspace + _ = ProjectMember.objects.filter( + workspace__slug=slug, + project_id__in=project_ids, + member=request.user, + ).update(is_active=True) + + ProjectMember.objects.bulk_create( + [ + ProjectMember( + project_id=project_id, + member=request.user, + role=15 if workspace_role >= 15 else 10, + workspace=workspace, + created_by=request.user, + ) + for project_id in project_ids + ], + ignore_conflicts=True, + ) + + IssueProperty.objects.bulk_create( + [ + IssueProperty( + project_id=project_id, + user=request.user, + workspace=workspace, + created_by=request.user, + ) + for project_id in project_ids + ], + ignore_conflicts=True, + ) + + return Response( + {"message": "Projects joined successfully"}, + status=status.HTTP_201_CREATED, + ) + + +class ProjectJoinEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def post(self, request, slug, project_id, pk): + project_invite = ProjectMemberInvite.objects.get( + pk=pk, + project_id=project_id, + workspace__slug=slug, + ) + + email = request.data.get("email", "") + + if email == "" or 
project_invite.email != email: + return Response( + {"error": "You do not have permission to join the project"}, + status=status.HTTP_403_FORBIDDEN, + ) + + if project_invite.responded_at is None: + project_invite.accepted = request.data.get("accepted", False) + project_invite.responded_at = timezone.now() + project_invite.save() + + if project_invite.accepted: + # Check if the user account exists + user = User.objects.filter(email=email).first() + + # Check if user is a part of workspace + workspace_member = WorkspaceMember.objects.filter( + workspace__slug=slug, member=user + ).first() + # Add him to workspace + if workspace_member is None: + _ = WorkspaceMember.objects.create( + workspace_id=project_invite.workspace_id, + member=user, + role=( + 15 + if project_invite.role >= 15 + else project_invite.role + ), + ) + else: + # Else make him active + workspace_member.is_active = True + workspace_member.save() + + # Check if the user was already a member of project then activate the user + project_member = ProjectMember.objects.filter( + workspace_id=project_invite.workspace_id, member=user + ).first() + if project_member is None: + # Create a Project Member + _ = ProjectMember.objects.create( + workspace_id=project_invite.workspace_id, + member=user, + role=project_invite.role, + ) + else: + project_member.is_active = True + project_member.role = project_member.role + project_member.save() + + return Response( + {"message": "Project Invitation Accepted"}, + status=status.HTTP_200_OK, + ) + + return Response( + {"message": "Project Invitation was not accepted"}, + status=status.HTTP_200_OK, + ) + + return Response( + {"error": "You have already responded to the invitation request"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug, project_id, pk): + project_invitation = ProjectMemberInvite.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + serializer = ProjectMemberInviteSerializer(project_invitation) + return 
Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/project/member.py b/apiserver/plane/app/views/project/member.py new file mode 100644 index 00000000000..187dfc8d05c --- /dev/null +++ b/apiserver/plane/app/views/project/member.py @@ -0,0 +1,349 @@ +# Third Party imports +from rest_framework.response import Response +from rest_framework import status + +# Module imports +from .base import BaseViewSet, BaseAPIView +from plane.app.serializers import ( + ProjectMemberSerializer, + ProjectMemberAdminSerializer, + ProjectMemberRoleSerializer, +) + +from plane.app.permissions import ( + ProjectBasePermission, + ProjectMemberPermission, + ProjectLitePermission, + WorkspaceUserPermission, +) + +from plane.db.models import ( + Project, + ProjectMember, + Workspace, + TeamMember, + IssueProperty, +) + + +class ProjectMemberViewSet(BaseViewSet): + serializer_class = ProjectMemberAdminSerializer + model = ProjectMember + permission_classes = [ + ProjectMemberPermission, + ] + + def get_permissions(self): + if self.action == "leave": + self.permission_classes = [ + ProjectLitePermission, + ] + else: + self.permission_classes = [ + ProjectMemberPermission, + ] + + return super(ProjectMemberViewSet, self).get_permissions() + + search_fields = [ + "member__display_name", + "member__first_name", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(member__is_bot=False) + .filter() + .select_related("project") + .select_related("member") + .select_related("workspace", "workspace__owner") + ) + + def create(self, request, slug, project_id): + members = request.data.get("members", []) + + # get the project + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + if not len(members): + return Response( + {"error": "Atleast one member is required"}, + 
status=status.HTTP_400_BAD_REQUEST, + ) + bulk_project_members = [] + bulk_issue_props = [] + + project_members = ( + ProjectMember.objects.filter( + workspace__slug=slug, + member_id__in=[member.get("member_id") for member in members], + ) + .values("member_id", "sort_order") + .order_by("sort_order") + ) + + bulk_project_members = [] + member_roles = { + member.get("member_id"): member.get("role") for member in members + } + # Update roles in the members array based on the member_roles dictionary + for project_member in ProjectMember.objects.filter( + project_id=project_id, + member_id__in=[member.get("member_id") for member in members], + ): + project_member.role = member_roles[str(project_member.member_id)] + project_member.is_active = True + bulk_project_members.append(project_member) + + # Update the roles of the existing members + ProjectMember.objects.bulk_update( + bulk_project_members, ["is_active", "role"], batch_size=100 + ) + + for member in members: + sort_order = [ + project_member.get("sort_order") + for project_member in project_members + if str(project_member.get("member_id")) + == str(member.get("member_id")) + ] + bulk_project_members.append( + ProjectMember( + member_id=member.get("member_id"), + role=member.get("role", 10), + project_id=project_id, + workspace_id=project.workspace_id, + sort_order=( + sort_order[0] - 10000 if len(sort_order) else 65535 + ), + ) + ) + bulk_issue_props.append( + IssueProperty( + user_id=member.get("member_id"), + project_id=project_id, + workspace_id=project.workspace_id, + ) + ) + + project_members = ProjectMember.objects.bulk_create( + bulk_project_members, + batch_size=10, + ignore_conflicts=True, + ) + + _ = IssueProperty.objects.bulk_create( + bulk_issue_props, batch_size=10, ignore_conflicts=True + ) + + project_members = ProjectMember.objects.filter( + project_id=project_id, + member_id__in=[member.get("member_id") for member in members], + ) + serializer = ProjectMemberRoleSerializer(project_members, 
many=True) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def list(self, request, slug, project_id): + # Get the list of project members for the project + project_members = ProjectMember.objects.filter( + project_id=project_id, + workspace__slug=slug, + member__is_bot=False, + is_active=True, + ).select_related("project", "member", "workspace") + + serializer = ProjectMemberRoleSerializer( + project_members, fields=("id", "member", "role"), many=True + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + def partial_update(self, request, slug, project_id, pk): + project_member = ProjectMember.objects.get( + pk=pk, + workspace__slug=slug, + project_id=project_id, + is_active=True, + ) + if request.user.id == project_member.member_id: + return Response( + {"error": "You cannot update your own role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + # Check while updating user roles + requested_project_member = ProjectMember.objects.get( + project_id=project_id, + workspace__slug=slug, + member=request.user, + is_active=True, + ) + if ( + "role" in request.data + and int(request.data.get("role", project_member.role)) + > requested_project_member.role + ): + return Response( + { + "error": "You cannot update a role that is higher than your own role" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = ProjectMemberSerializer( + project_member, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, slug, project_id, pk): + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + pk=pk, + member__is_bot=False, + is_active=True, + ) + # check requesting user role + requesting_project_member = ProjectMember.objects.get( + workspace__slug=slug, + member=request.user, + project_id=project_id, + 
is_active=True, + ) + # User cannot remove himself + if str(project_member.id) == str(requesting_project_member.id): + return Response( + { + "error": "You cannot remove yourself from the workspace. Please use leave workspace" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # User cannot deactivate higher role + if requesting_project_member.role < project_member.role: + return Response( + { + "error": "You cannot remove a user having role higher than you" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + project_member.is_active = False + project_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + def leave(self, request, slug, project_id): + project_member = ProjectMember.objects.get( + workspace__slug=slug, + project_id=project_id, + member=request.user, + is_active=True, + ) + + # Check if the leaving user is the only admin of the project + if ( + project_member.role == 20 + and not ProjectMember.objects.filter( + workspace__slug=slug, + project_id=project_id, + role=20, + is_active=True, + ).count() + > 1 + ): + return Response( + { + "error": "You cannot leave the project as your the only admin of the project you will have to either delete the project or create an another admin", + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # Deactivate the user + project_member.is_active = False + project_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class AddTeamToProjectEndpoint(BaseAPIView): + permission_classes = [ + ProjectBasePermission, + ] + + def post(self, request, slug, project_id): + team_members = TeamMember.objects.filter( + workspace__slug=slug, team__in=request.data.get("teams", []) + ).values_list("member", flat=True) + + if len(team_members) == 0: + return Response( + {"error": "No such team exists"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + project_members = [] + issue_props = [] + for member in team_members: + project_members.append( + ProjectMember( + 
project_id=project_id, + member_id=member, + workspace=workspace, + created_by=request.user, + ) + ) + issue_props.append( + IssueProperty( + project_id=project_id, + user_id=member, + workspace=workspace, + created_by=request.user, + ) + ) + + ProjectMember.objects.bulk_create( + project_members, batch_size=10, ignore_conflicts=True + ) + + _ = IssueProperty.objects.bulk_create( + issue_props, batch_size=10, ignore_conflicts=True + ) + + serializer = ProjectMemberSerializer(project_members, many=True) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + +class ProjectMemberUserEndpoint(BaseAPIView): + def get(self, request, slug, project_id): + project_member = ProjectMember.objects.get( + project_id=project_id, + workspace__slug=slug, + member=request.user, + is_active=True, + ) + serializer = ProjectMemberSerializer(project_member) + + return Response(serializer.data, status=status.HTTP_200_OK) + + +class UserProjectRolesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceUserPermission, + ] + + def get(self, request, slug): + project_members = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=request.user.id, + ).values("project_id", "role") + + project_members = { + str(member["project_id"]): member["role"] + for member in project_members + } + return Response(project_members, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/search.py b/apiserver/plane/app/views/search.py index a2ed1c015ad..4a4ffd826d8 100644 --- a/apiserver/plane/app/views/search.py +++ b/apiserver/plane/app/views/search.py @@ -50,6 +50,7 @@ def filter_projects(self, query, slug, project_id, workspace_search): q, project_projectmember__member=self.request.user, project_projectmember__is_active=True, + archived_at__isnull=True, workspace__slug=slug, ) .distinct() @@ -72,6 +73,7 @@ def filter_issues(self, query, slug, project_id, workspace_search): q, project__project_projectmember__member=self.request.user, 
project__project_projectmember__is_active=True, + project__archived_at__isnull=True, workspace__slug=slug, ) @@ -97,6 +99,7 @@ def filter_cycles(self, query, slug, project_id, workspace_search): q, project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, workspace__slug=slug, ) @@ -121,6 +124,7 @@ def filter_modules(self, query, slug, project_id, workspace_search): q, project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, workspace__slug=slug, ) @@ -145,6 +149,7 @@ def filter_pages(self, query, slug, project_id, workspace_search): q, project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, workspace__slug=slug, ) @@ -169,6 +174,7 @@ def filter_views(self, query, slug, project_id, workspace_search): q, project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, workspace__slug=slug, ) @@ -235,6 +241,7 @@ def get(self, request, slug, project_id): cycle = request.query_params.get("cycle", "false") module = request.query_params.get("module", False) sub_issue = request.query_params.get("sub_issue", "false") + target_date = request.query_params.get("target_date", True) issue_id = request.query_params.get("issue_id", False) @@ -242,6 +249,7 @@ def get(self, request, slug, project_id): workspace__slug=slug, project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True ) if workspace_search == "false": @@ -253,7 +261,8 @@ def get(self, request, slug, project_id): if parent == "true" and issue_id: issue = Issue.issue_objects.get(pk=issue_id) issues = issues.filter( - ~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id)) + ~Q(pk=issue_id), 
~Q(pk=issue.parent_id), ~Q(parent_id=issue_id) + ) if issue_relation == "true" and issue_id: issue = Issue.issue_objects.get(pk=issue_id) issues = issues.filter( @@ -273,6 +282,9 @@ def get(self, request, slug, project_id): if module: issues = issues.exclude(issue_module__module=module) + if target_date == "none": + issues = issues.filter(target_date__isnull=True) + return Response( issues.values( "name", diff --git a/apiserver/plane/app/views/state.py b/apiserver/plane/app/views/state/base.py similarity index 89% rename from apiserver/plane/app/views/state.py rename to apiserver/plane/app/views/state/base.py index 34b3d1dcc01..7b09044906d 100644 --- a/apiserver/plane/app/views/state.py +++ b/apiserver/plane/app/views/state/base.py @@ -9,14 +9,13 @@ from rest_framework import status # Module imports -from . import BaseViewSet, BaseAPIView +from .. import BaseViewSet from plane.app.serializers import StateSerializer from plane.app.permissions import ( ProjectEntityPermission, - WorkspaceEntityPermission, ) from plane.db.models import State, Issue - +from plane.utils.cache import invalidate_cache class StateViewSet(BaseViewSet): serializer_class = StateSerializer @@ -34,6 +33,7 @@ def get_queryset(self): .filter( project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, ) .filter(~Q(name="Triage")) .select_related("project") @@ -41,6 +41,7 @@ def get_queryset(self): .distinct() ) + @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False) def create(self, request, slug, project_id): serializer = StateSerializer(data=request.data) if serializer.is_valid(): @@ -61,6 +62,7 @@ def list(self, request, slug, project_id): return Response(state_dict, status=status.HTTP_200_OK) return Response(states, status=status.HTTP_200_OK) + @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False) def mark_as_default(self, request, slug, project_id, pk): # 
Select all the states which are marked as default _ = State.objects.filter( @@ -71,6 +73,7 @@ def mark_as_default(self, request, slug, project_id, pk): ).update(default=True) return Response(status=status.HTTP_204_NO_CONTENT) + @invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False) def destroy(self, request, slug, project_id, pk): state = State.objects.get( ~Q(name="Triage"), diff --git a/apiserver/plane/app/views/user.py b/apiserver/plane/app/views/user/base.py similarity index 89% rename from apiserver/plane/app/views/user.py rename to apiserver/plane/app/views/user/base.py index 7764e3b9753..4d69d1cf27c 100644 --- a/apiserver/plane/app/views/user.py +++ b/apiserver/plane/app/views/user/base.py @@ -1,25 +1,24 @@ +# Django imports +from django.db.models import Case, Count, IntegerField, Q, When + # Third party imports -from rest_framework.response import Response from rest_framework import status - +from rest_framework.response import Response # Module imports from plane.app.serializers import ( - UserSerializer, IssueActivitySerializer, UserMeSerializer, UserMeSettingsSerializer, + UserSerializer, ) - -from plane.app.views.base import BaseViewSet, BaseAPIView -from plane.db.models import User, IssueActivity, WorkspaceMember, ProjectMember +from plane.app.views.base import BaseAPIView, BaseViewSet +from plane.db.models import IssueActivity, ProjectMember, User, WorkspaceMember from plane.license.models import Instance, InstanceAdmin +from plane.utils.cache import cache_response, invalidate_cache from plane.utils.paginator import BasePaginator -from django.db.models import Q, F, Count, Case, When, IntegerField - - class UserEndpoint(BaseViewSet): serializer_class = UserSerializer model = User @@ -27,6 +26,7 @@ class UserEndpoint(BaseViewSet): def get_object(self): return self.request.user + @cache_response(60 * 60) def retrieve(self, request): serialized_data = UserMeSerializer(request.user).data return Response( @@ -34,10 +34,12 @@ def 
retrieve(self, request): status=status.HTTP_200_OK, ) + @cache_response(60 * 60) def retrieve_user_settings(self, request): serialized_data = UserMeSettingsSerializer(request.user).data return Response(serialized_data, status=status.HTTP_200_OK) + @cache_response(60 * 60) def retrieve_instance_admin(self, request): instance = Instance.objects.first() is_admin = InstanceAdmin.objects.filter( @@ -47,6 +49,11 @@ def retrieve_instance_admin(self, request): {"is_instance_admin": is_admin}, status=status.HTTP_200_OK ) + @invalidate_cache(path="/api/users/me/") + def partial_update(self, request, *args, **kwargs): + return super().partial_update(request, *args, **kwargs) + + @invalidate_cache(path="/api/users/me/") def deactivate(self, request): # Check all workspace user is active user = self.get_object() @@ -145,6 +152,8 @@ def deactivate(self, request): class UpdateUserOnBoardedEndpoint(BaseAPIView): + + @invalidate_cache(path="/api/users/me/") def patch(self, request): user = User.objects.get(pk=request.user.id, is_active=True) user.is_onboarded = request.data.get("is_onboarded", False) @@ -155,6 +164,8 @@ def patch(self, request): class UpdateUserTourCompletedEndpoint(BaseAPIView): + + @invalidate_cache(path="/api/users/me/") def patch(self, request): user = User.objects.get(pk=request.user.id, is_active=True) user.is_tour_completed = request.data.get("is_tour_completed", False) @@ -165,6 +176,7 @@ def patch(self, request): class UserActivityEndpoint(BaseAPIView, BasePaginator): + def get(self, request): queryset = IssueActivity.objects.filter( actor=request.user diff --git a/apiserver/plane/app/views/view.py b/apiserver/plane/app/views/view/base.py similarity index 96% rename from apiserver/plane/app/views/view.py rename to apiserver/plane/app/views/view/base.py index ade445fae80..45e7bd29cf7 100644 --- a/apiserver/plane/app/views/view.py +++ b/apiserver/plane/app/views/view/base.py @@ -15,18 +15,15 @@ from django.views.decorators.gzip import gzip_page from 
django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.fields import ArrayField -from django.db.models import Value, UUIDField +from django.db.models import UUIDField from django.db.models.functions import Coalesce -from django.contrib.postgres.aggregates import ArrayAgg -from django.contrib.postgres.fields import ArrayField -from django.db.models import Value, UUIDField # Third party imports from rest_framework.response import Response from rest_framework import status # Module imports -from . import BaseViewSet +from .. import BaseViewSet from plane.app.serializers import ( IssueViewSerializer, IssueSerializer, @@ -128,7 +125,8 @@ def get_queryset(self): ArrayAgg( "assignees__id", distinct=True, - filter=~Q(assignees__id__isnull=True), + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), ), Value([], output_field=ArrayField(UUIDField())), ), @@ -146,11 +144,6 @@ def get_queryset(self): @method_decorator(gzip_page) def list(self, request, slug): filters = issue_filters(request.query_params, "GET") - fields = [ - field - for field in request.GET.get("fields", "").split(",") - if field - ] # Custom ordering for priority and state priority_order = ["urgent", "high", "medium", "low", "none"] @@ -290,6 +283,7 @@ def get_queryset(self): .filter( project__project_projectmember__member=self.request.user, project__project_projectmember__is_active=True, + project__archived_at__isnull=True, ) .select_related("project") .select_related("workspace") diff --git a/apiserver/plane/app/views/webhook.py b/apiserver/plane/app/views/webhook/base.py similarity index 98% rename from apiserver/plane/app/views/webhook.py rename to apiserver/plane/app/views/webhook/base.py index fe69cd7e64e..9586722a0cc 100644 --- a/apiserver/plane/app/views/webhook.py +++ b/apiserver/plane/app/views/webhook/base.py @@ -8,7 +8,7 @@ # Module imports from plane.db.models import Webhook, WebhookLog, Workspace from plane.db.models.webhook import 
generate_token -from .base import BaseAPIView +from ..base import BaseAPIView from plane.app.permissions import WorkspaceOwnerPermission from plane.app.serializers import WebhookSerializer, WebhookLogSerializer @@ -41,7 +41,7 @@ def post(self, request, slug): raise IntegrityError def get(self, request, slug, pk=None): - if pk == None: + if pk is None: webhooks = Webhook.objects.filter(workspace__slug=slug) serializer = WebhookSerializer( webhooks, diff --git a/apiserver/plane/app/views/workspace.py b/apiserver/plane/app/views/workspace.py deleted file mode 100644 index 47de86a1c17..00000000000 --- a/apiserver/plane/app/views/workspace.py +++ /dev/null @@ -1,1761 +0,0 @@ -# Python imports -import jwt -from datetime import date, datetime -from dateutil.relativedelta import relativedelta - -# Django imports -from django.db import IntegrityError -from django.conf import settings -from django.utils import timezone -from django.core.exceptions import ValidationError -from django.core.validators import validate_email -from django.db.models import ( - Prefetch, - OuterRef, - Func, - F, - Q, - Count, - Case, - Value, - CharField, - When, - Max, - IntegerField, - Sum, -) -from django.db.models.functions import ExtractWeek, Cast, ExtractDay -from django.db.models.fields import DateField -from django.contrib.postgres.aggregates import ArrayAgg -from django.contrib.postgres.fields import ArrayField -from django.db.models import Value, UUIDField -from django.db.models.functions import Coalesce - -# Third party modules -from rest_framework import status -from rest_framework.response import Response -from rest_framework.permissions import AllowAny - -# Module imports -from plane.app.serializers import ( - WorkSpaceSerializer, - WorkSpaceMemberSerializer, - TeamSerializer, - WorkSpaceMemberInviteSerializer, - UserLiteSerializer, - ProjectMemberSerializer, - WorkspaceThemeSerializer, - IssueActivitySerializer, - IssueSerializer, - WorkspaceMemberAdminSerializer, - 
WorkspaceMemberMeSerializer, - ProjectMemberRoleSerializer, - WorkspaceUserPropertiesSerializer, - WorkspaceEstimateSerializer, - StateSerializer, - LabelSerializer, -) -from plane.app.views.base import BaseAPIView -from . import BaseViewSet -from plane.db.models import ( - State, - User, - Workspace, - WorkspaceMemberInvite, - Team, - ProjectMember, - IssueActivity, - Issue, - WorkspaceTheme, - IssueLink, - IssueAttachment, - IssueSubscriber, - Project, - Label, - WorkspaceMember, - CycleIssue, - IssueReaction, - WorkspaceUserProperties, - Estimate, - EstimatePoint, - Module, - ModuleLink, - Cycle, -) -from plane.app.permissions import ( - WorkSpaceBasePermission, - WorkSpaceAdminPermission, - WorkspaceEntityPermission, - WorkspaceViewerPermission, - WorkspaceUserPermission, - ProjectLitePermission, -) -from plane.bgtasks.workspace_invitation_task import workspace_invitation -from plane.utils.issue_filters import issue_filters -from plane.bgtasks.event_tracking_task import workspace_invite_event -from plane.app.serializers.module import ( - ModuleSerializer, -) -from plane.app.serializers.cycle import ( - CycleSerializer, -) - - -class WorkSpaceViewSet(BaseViewSet): - model = Workspace - serializer_class = WorkSpaceSerializer - permission_classes = [ - WorkSpaceBasePermission, - ] - - search_fields = [ - "name", - ] - filterset_fields = [ - "owner", - ] - - lookup_field = "slug" - - def get_queryset(self): - member_count = ( - WorkspaceMember.objects.filter( - workspace=OuterRef("id"), - member__is_bot=False, - is_active=True, - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - issue_count = ( - Issue.issue_objects.filter(workspace=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - return ( - self.filter_queryset( - super().get_queryset().select_related("owner") - ) - .order_by("name") - .filter( - workspace_member__member=self.request.user, - 
workspace_member__is_active=True, - ) - .annotate(total_members=member_count) - .annotate(total_issues=issue_count) - .select_related("owner") - ) - - def create(self, request): - try: - serializer = WorkSpaceSerializer(data=request.data) - - slug = request.data.get("slug", False) - name = request.data.get("name", False) - - if not name or not slug: - return Response( - {"error": "Both name and slug are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - if len(name) > 80 or len(slug) > 48: - return Response( - { - "error": "The maximum length for name is 80 and for slug is 48" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - if serializer.is_valid(): - serializer.save(owner=request.user) - # Create Workspace member - _ = WorkspaceMember.objects.create( - workspace_id=serializer.data["id"], - member=request.user, - role=20, - company_role=request.data.get("company_role", ""), - ) - return Response( - serializer.data, status=status.HTTP_201_CREATED - ) - return Response( - [serializer.errors[error][0] for error in serializer.errors], - status=status.HTTP_400_BAD_REQUEST, - ) - - except IntegrityError as e: - if "already exists" in str(e): - return Response( - {"slug": "The workspace with the slug already exists"}, - status=status.HTTP_410_GONE, - ) - - -class UserWorkSpacesEndpoint(BaseAPIView): - search_fields = [ - "name", - ] - filterset_fields = [ - "owner", - ] - - def get(self, request): - fields = [ - field - for field in request.GET.get("fields", "").split(",") - if field - ] - member_count = ( - WorkspaceMember.objects.filter( - workspace=OuterRef("id"), - member__is_bot=False, - is_active=True, - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - issue_count = ( - Issue.issue_objects.filter(workspace=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - - workspace = ( - Workspace.objects.prefetch_related( - Prefetch( - "workspace_member", - 
queryset=WorkspaceMember.objects.filter( - member=request.user, is_active=True - ), - ) - ) - .select_related("owner") - .annotate(total_members=member_count) - .annotate(total_issues=issue_count) - .filter( - workspace_member__member=request.user, - workspace_member__is_active=True, - ) - .distinct() - ) - workspaces = WorkSpaceSerializer( - self.filter_queryset(workspace), - fields=fields if fields else None, - many=True, - ).data - return Response(workspaces, status=status.HTTP_200_OK) - - -class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView): - def get(self, request): - slug = request.GET.get("slug", False) - - if not slug or slug == "": - return Response( - {"error": "Workspace Slug is required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.filter(slug=slug).exists() - return Response({"status": not workspace}, status=status.HTTP_200_OK) - - -class WorkspaceInvitationsViewset(BaseViewSet): - """Endpoint for creating, listing and deleting workspaces""" - - serializer_class = WorkSpaceMemberInviteSerializer - model = WorkspaceMemberInvite - - permission_classes = [ - WorkSpaceAdminPermission, - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", "workspace__owner", "created_by") - ) - - def create(self, request, slug): - emails = request.data.get("emails", []) - # Check if email is provided - if not emails: - return Response( - {"error": "Emails are required"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # check for role level of the requesting user - requesting_user = WorkspaceMember.objects.get( - workspace__slug=slug, - member=request.user, - is_active=True, - ) - - # Check if any invited user has an higher role - if len( - [ - email - for email in emails - if int(email.get("role", 10)) > requesting_user.role - ] - ): - return Response( - {"error": "You cannot invite a user with higher role"}, - 
status=status.HTTP_400_BAD_REQUEST, - ) - - # Get the workspace object - workspace = Workspace.objects.get(slug=slug) - - # Check if user is already a member of workspace - workspace_members = WorkspaceMember.objects.filter( - workspace_id=workspace.id, - member__email__in=[email.get("email") for email in emails], - is_active=True, - ).select_related("member", "workspace", "workspace__owner") - - if workspace_members: - return Response( - { - "error": "Some users are already member of workspace", - "workspace_users": WorkSpaceMemberSerializer( - workspace_members, many=True - ).data, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace_invitations = [] - for email in emails: - try: - validate_email(email.get("email")) - workspace_invitations.append( - WorkspaceMemberInvite( - email=email.get("email").strip().lower(), - workspace_id=workspace.id, - token=jwt.encode( - { - "email": email, - "timestamp": datetime.now().timestamp(), - }, - settings.SECRET_KEY, - algorithm="HS256", - ), - role=email.get("role", 10), - created_by=request.user, - ) - ) - except ValidationError: - return Response( - { - "error": f"Invalid email - {email} provided a valid email address is required to send the invite" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - # Create workspace member invite - workspace_invitations = WorkspaceMemberInvite.objects.bulk_create( - workspace_invitations, batch_size=10, ignore_conflicts=True - ) - - current_site = request.META.get("HTTP_ORIGIN") - - # Send invitations - for invitation in workspace_invitations: - workspace_invitation.delay( - invitation.email, - workspace.id, - invitation.token, - current_site, - request.user.email, - ) - - return Response( - { - "message": "Emails sent successfully", - }, - status=status.HTTP_200_OK, - ) - - def destroy(self, request, slug, pk): - workspace_member_invite = WorkspaceMemberInvite.objects.get( - pk=pk, workspace__slug=slug - ) - workspace_member_invite.delete() - return 
Response(status=status.HTTP_204_NO_CONTENT) - - -class WorkspaceJoinEndpoint(BaseAPIView): - permission_classes = [ - AllowAny, - ] - """Invitation response endpoint the user can respond to the invitation""" - - def post(self, request, slug, pk): - workspace_invite = WorkspaceMemberInvite.objects.get( - pk=pk, workspace__slug=slug - ) - - email = request.data.get("email", "") - - # Check the email - if email == "" or workspace_invite.email != email: - return Response( - {"error": "You do not have permission to join the workspace"}, - status=status.HTTP_403_FORBIDDEN, - ) - - # If already responded then return error - if workspace_invite.responded_at is None: - workspace_invite.accepted = request.data.get("accepted", False) - workspace_invite.responded_at = timezone.now() - workspace_invite.save() - - if workspace_invite.accepted: - # Check if the user created account after invitation - user = User.objects.filter(email=email).first() - - # If the user is present then create the workspace member - if user is not None: - # Check if the user was already a member of workspace then activate the user - workspace_member = WorkspaceMember.objects.filter( - workspace=workspace_invite.workspace, member=user - ).first() - if workspace_member is not None: - workspace_member.is_active = True - workspace_member.role = workspace_invite.role - workspace_member.save() - else: - # Create a Workspace - _ = WorkspaceMember.objects.create( - workspace=workspace_invite.workspace, - member=user, - role=workspace_invite.role, - ) - - # Set the user last_workspace_id to the accepted workspace - user.last_workspace_id = workspace_invite.workspace.id - user.save() - - # Delete the invitation - workspace_invite.delete() - - # Send event - workspace_invite_event.delay( - user=user.id if user is not None else None, - email=email, - user_agent=request.META.get("HTTP_USER_AGENT"), - ip=request.META.get("REMOTE_ADDR"), - event_name="MEMBER_ACCEPTED", - accepted_from="EMAIL", - ) - - return 
Response( - {"message": "Workspace Invitation Accepted"}, - status=status.HTTP_200_OK, - ) - - # Workspace invitation rejected - return Response( - {"message": "Workspace Invitation was not accepted"}, - status=status.HTTP_200_OK, - ) - - return Response( - {"error": "You have already responded to the invitation request"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - def get(self, request, slug, pk): - workspace_invitation = WorkspaceMemberInvite.objects.get( - workspace__slug=slug, pk=pk - ) - serializer = WorkSpaceMemberInviteSerializer(workspace_invitation) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class UserWorkspaceInvitationsViewSet(BaseViewSet): - serializer_class = WorkSpaceMemberInviteSerializer - model = WorkspaceMemberInvite - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(email=self.request.user.email) - .select_related("workspace", "workspace__owner", "created_by") - .annotate(total_members=Count("workspace__workspace_member")) - ) - - def create(self, request): - invitations = request.data.get("invitations", []) - workspace_invitations = WorkspaceMemberInvite.objects.filter( - pk__in=invitations, email=request.user.email - ).order_by("-created_at") - - # If the user is already a member of workspace and was deactivated then activate the user - for invitation in workspace_invitations: - # Update the WorkspaceMember for this specific invitation - WorkspaceMember.objects.filter( - workspace_id=invitation.workspace_id, member=request.user - ).update(is_active=True, role=invitation.role) - - # Bulk create the user for all the workspaces - WorkspaceMember.objects.bulk_create( - [ - WorkspaceMember( - workspace=invitation.workspace, - member=request.user, - role=invitation.role, - created_by=request.user, - ) - for invitation in workspace_invitations - ], - ignore_conflicts=True, - ) - - # Delete joined workspace invites - workspace_invitations.delete() - - return 
Response(status=status.HTTP_204_NO_CONTENT) - - -class WorkSpaceMemberViewSet(BaseViewSet): - serializer_class = WorkspaceMemberAdminSerializer - model = WorkspaceMember - - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get_permissions(self): - if self.action == "leave": - self.permission_classes = [ - WorkspaceUserPermission, - ] - else: - self.permission_classes = [ - WorkspaceEntityPermission, - ] - - return super(WorkSpaceMemberViewSet, self).get_permissions() - - search_fields = [ - "member__display_name", - "member__first_name", - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter( - workspace__slug=self.kwargs.get("slug"), - is_active=True, - ) - .select_related("workspace", "workspace__owner") - .select_related("member") - ) - - def list(self, request, slug): - workspace_member = WorkspaceMember.objects.get( - member=request.user, - workspace__slug=slug, - is_active=True, - ) - - # Get all active workspace members - workspace_members = self.get_queryset() - - if workspace_member.role > 10: - serializer = WorkspaceMemberAdminSerializer( - workspace_members, - fields=("id", "member", "role"), - many=True, - ) - else: - serializer = WorkSpaceMemberSerializer( - workspace_members, - fields=("id", "member", "role"), - many=True, - ) - return Response(serializer.data, status=status.HTTP_200_OK) - - def partial_update(self, request, slug, pk): - workspace_member = WorkspaceMember.objects.get( - pk=pk, - workspace__slug=slug, - member__is_bot=False, - is_active=True, - ) - if request.user.id == workspace_member.member_id: - return Response( - {"error": "You cannot update your own role"}, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Get the requested user role - requested_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, - member=request.user, - is_active=True, - ) - # Check if role is being updated - # One cannot update role higher than his own role - if ( - "role" in 
request.data - and int(request.data.get("role", workspace_member.role)) - > requested_workspace_member.role - ): - return Response( - { - "error": "You cannot update a role that is higher than your own role" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - serializer = WorkSpaceMemberSerializer( - workspace_member, data=request.data, partial=True - ) - - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_200_OK) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - def destroy(self, request, slug, pk): - # Check the user role who is deleting the user - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, - pk=pk, - member__is_bot=False, - is_active=True, - ) - - # check requesting user role - requesting_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, - member=request.user, - is_active=True, - ) - - if str(workspace_member.id) == str(requesting_workspace_member.id): - return Response( - { - "error": "You cannot remove yourself from the workspace. Please use leave workspace" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - if requesting_workspace_member.role < workspace_member.role: - return Response( - { - "error": "You cannot remove a user having role higher than you" - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - if ( - Project.objects.annotate( - total_members=Count("project_projectmember"), - member_with_role=Count( - "project_projectmember", - filter=Q( - project_projectmember__member_id=workspace_member.id, - project_projectmember__role=20, - ), - ), - ) - .filter(total_members=1, member_with_role=1, workspace__slug=slug) - .exists() - ): - return Response( - { - "error": "User is a part of some projects where they are the only admin, they should either leave that project or promote another user to admin." 
- }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # Deactivate the users from the projects where the user is part of - _ = ProjectMember.objects.filter( - workspace__slug=slug, - member_id=workspace_member.member_id, - is_active=True, - ).update(is_active=False) - - workspace_member.is_active = False - workspace_member.save() - return Response(status=status.HTTP_204_NO_CONTENT) - - def leave(self, request, slug): - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, - member=request.user, - is_active=True, - ) - - # Check if the leaving user is the only admin of the workspace - if ( - workspace_member.role == 20 - and not WorkspaceMember.objects.filter( - workspace__slug=slug, - role=20, - is_active=True, - ).count() - > 1 - ): - return Response( - { - "error": "You cannot leave the workspace as you are the only admin of the workspace you will have to either delete the workspace or promote another user to admin." - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - if ( - Project.objects.annotate( - total_members=Count("project_projectmember"), - member_with_role=Count( - "project_projectmember", - filter=Q( - project_projectmember__member_id=request.user.id, - project_projectmember__role=20, - ), - ), - ) - .filter(total_members=1, member_with_role=1, workspace__slug=slug) - .exists() - ): - return Response( - { - "error": "You are a part of some projects where you are the only admin, you should either leave the project or promote another user to admin." 
- }, - status=status.HTTP_400_BAD_REQUEST, - ) - - # # Deactivate the users from the projects where the user is part of - _ = ProjectMember.objects.filter( - workspace__slug=slug, - member_id=workspace_member.member_id, - is_active=True, - ).update(is_active=False) - - # # Deactivate the user - workspace_member.is_active = False - workspace_member.save() - return Response(status=status.HTTP_204_NO_CONTENT) - - -class WorkspaceProjectMemberEndpoint(BaseAPIView): - serializer_class = ProjectMemberRoleSerializer - model = ProjectMember - - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get(self, request, slug): - # Fetch all project IDs where the user is involved - project_ids = ( - ProjectMember.objects.filter( - member=request.user, - is_active=True, - ) - .values_list("project_id", flat=True) - .distinct() - ) - - # Get all the project members in which the user is involved - project_members = ProjectMember.objects.filter( - workspace__slug=slug, - project_id__in=project_ids, - is_active=True, - ).select_related("project", "member", "workspace") - project_members = ProjectMemberRoleSerializer( - project_members, many=True - ).data - - project_members_dict = dict() - - # Construct a dictionary with project_id as key and project_members as value - for project_member in project_members: - project_id = project_member.pop("project") - if str(project_id) not in project_members_dict: - project_members_dict[str(project_id)] = [] - project_members_dict[str(project_id)].append(project_member) - - return Response(project_members_dict, status=status.HTTP_200_OK) - - -class TeamMemberViewSet(BaseViewSet): - serializer_class = TeamSerializer - model = Team - permission_classes = [ - WorkSpaceAdminPermission, - ] - - search_fields = [ - "member__display_name", - "member__first_name", - ] - - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .select_related("workspace", 
"workspace__owner") - .prefetch_related("members") - ) - - def create(self, request, slug): - members = list( - WorkspaceMember.objects.filter( - workspace__slug=slug, - member__id__in=request.data.get("members", []), - is_active=True, - ) - .annotate(member_str_id=Cast("member", output_field=CharField())) - .distinct() - .values_list("member_str_id", flat=True) - ) - - if len(members) != len(request.data.get("members", [])): - users = list( - set(request.data.get("members", [])).difference(members) - ) - users = User.objects.filter(pk__in=users) - - serializer = UserLiteSerializer(users, many=True) - return Response( - { - "error": f"{len(users)} of the member(s) are not a part of the workspace", - "members": serializer.data, - }, - status=status.HTTP_400_BAD_REQUEST, - ) - - workspace = Workspace.objects.get(slug=slug) - - serializer = TeamSerializer( - data=request.data, context={"workspace": workspace} - ) - if serializer.is_valid(): - serializer.save() - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - -class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): - def get(self, request): - user = User.objects.get(pk=request.user.id) - - last_workspace_id = user.last_workspace_id - - if last_workspace_id is None: - return Response( - { - "project_details": [], - "workspace_details": {}, - }, - status=status.HTTP_200_OK, - ) - - workspace = Workspace.objects.get(pk=last_workspace_id) - workspace_serializer = WorkSpaceSerializer(workspace) - - project_member = ProjectMember.objects.filter( - workspace_id=last_workspace_id, member=request.user - ).select_related("workspace", "project", "member", "workspace__owner") - - project_member_serializer = ProjectMemberSerializer( - project_member, many=True - ) - - return Response( - { - "workspace_details": workspace_serializer.data, - "project_details": project_member_serializer.data, - }, - status=status.HTTP_200_OK, - ) - - -class 
WorkspaceMemberUserEndpoint(BaseAPIView): - def get(self, request, slug): - workspace_member = WorkspaceMember.objects.get( - member=request.user, - workspace__slug=slug, - is_active=True, - ) - serializer = WorkspaceMemberMeSerializer(workspace_member) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class WorkspaceMemberUserViewsEndpoint(BaseAPIView): - def post(self, request, slug): - workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, - member=request.user, - is_active=True, - ) - workspace_member.view_props = request.data.get("view_props", {}) - workspace_member.save() - - return Response(status=status.HTTP_204_NO_CONTENT) - - -class UserActivityGraphEndpoint(BaseAPIView): - def get(self, request, slug): - issue_activities = ( - IssueActivity.objects.filter( - actor=request.user, - workspace__slug=slug, - created_at__date__gte=date.today() + relativedelta(months=-6), - ) - .annotate(created_date=Cast("created_at", DateField())) - .values("created_date") - .annotate(activity_count=Count("created_date")) - .order_by("created_date") - ) - - return Response(issue_activities, status=status.HTTP_200_OK) - - -class UserIssueCompletedGraphEndpoint(BaseAPIView): - def get(self, request, slug): - month = request.GET.get("month", 1) - - issues = ( - Issue.issue_objects.filter( - assignees__in=[request.user], - workspace__slug=slug, - completed_at__month=month, - completed_at__isnull=False, - ) - .annotate(completed_week=ExtractWeek("completed_at")) - .annotate(week=F("completed_week") % 4) - .values("week") - .annotate(completed_count=Count("completed_week")) - .order_by("week") - ) - - return Response(issues, status=status.HTTP_200_OK) - - -class WeekInMonth(Func): - function = "FLOOR" - template = "(((%(expressions)s - 1) / 7) + 1)::INTEGER" - - -class UserWorkspaceDashboardEndpoint(BaseAPIView): - def get(self, request, slug): - issue_activities = ( - IssueActivity.objects.filter( - actor=request.user, - workspace__slug=slug, - 
created_at__date__gte=date.today() + relativedelta(months=-3), - ) - .annotate(created_date=Cast("created_at", DateField())) - .values("created_date") - .annotate(activity_count=Count("created_date")) - .order_by("created_date") - ) - - month = request.GET.get("month", 1) - - completed_issues = ( - Issue.issue_objects.filter( - assignees__in=[request.user], - workspace__slug=slug, - completed_at__month=month, - completed_at__isnull=False, - ) - .annotate(day_of_month=ExtractDay("completed_at")) - .annotate(week_in_month=WeekInMonth(F("day_of_month"))) - .values("week_in_month") - .annotate(completed_count=Count("id")) - .order_by("week_in_month") - ) - - assigned_issues = Issue.issue_objects.filter( - workspace__slug=slug, assignees__in=[request.user] - ).count() - - pending_issues_count = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[request.user], - ).count() - - completed_issues_count = Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[request.user], - state__group="completed", - ).count() - - issues_due_week = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[request.user], - ) - .annotate(target_week=ExtractWeek("target_date")) - .filter(target_week=timezone.now().date().isocalendar()[1]) - .count() - ) - - state_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, assignees__in=[request.user] - ) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) - - overdue_issues = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[request.user], - target_date__lt=timezone.now(), - completed_at__isnull=True, - ).values("id", "name", "workspace__slug", "project_id", "target_date") - - upcoming_issues = Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - 
start_date__gte=timezone.now(), - workspace__slug=slug, - assignees__in=[request.user], - completed_at__isnull=True, - ).values("id", "name", "workspace__slug", "project_id", "start_date") - - return Response( - { - "issue_activities": issue_activities, - "completed_issues": completed_issues, - "assigned_issues_count": assigned_issues, - "pending_issues_count": pending_issues_count, - "completed_issues_count": completed_issues_count, - "issues_due_week_count": issues_due_week, - "state_distribution": state_distribution, - "overdue_issues": overdue_issues, - "upcoming_issues": upcoming_issues, - }, - status=status.HTTP_200_OK, - ) - - -class WorkspaceThemeViewSet(BaseViewSet): - permission_classes = [ - WorkSpaceAdminPermission, - ] - model = WorkspaceTheme - serializer_class = WorkspaceThemeSerializer - - def get_queryset(self): - return ( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - ) - - def create(self, request, slug): - workspace = Workspace.objects.get(slug=slug) - serializer = WorkspaceThemeSerializer(data=request.data) - if serializer.is_valid(): - serializer.save(workspace=workspace, actor=request.user) - return Response(serializer.data, status=status.HTTP_201_CREATED) - return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) - - -class WorkspaceUserProfileStatsEndpoint(BaseAPIView): - def get(self, request, slug, user_id): - filters = issue_filters(request.query_params, "GET") - - state_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True - ) - .filter(**filters) - .annotate(state_group=F("state__group")) - .values("state_group") - .annotate(state_count=Count("state_group")) - .order_by("state_group") - ) - - priority_order = ["urgent", "high", "medium", "low", "none"] - - priority_distribution = ( - Issue.issue_objects.filter( - workspace__slug=slug, - 
assignees__in=[user_id], - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True - ) - .filter(**filters) - .values("priority") - .annotate(priority_count=Count("priority")) - .filter(priority_count__gte=1) - .annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in enumerate(priority_order) - ], - default=Value(len(priority_order)), - output_field=IntegerField(), - ) - ) - .order_by("priority_order") - ) - - created_issues = ( - Issue.issue_objects.filter( - workspace__slug=slug, - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True, - created_by_id=user_id, - ) - .filter(**filters) - .count() - ) - - assigned_issues_count = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True, - ) - .filter(**filters) - .count() - ) - - pending_issues_count = ( - Issue.issue_objects.filter( - ~Q(state__group__in=["completed", "cancelled"]), - workspace__slug=slug, - assignees__in=[user_id], - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True, - ) - .filter(**filters) - .count() - ) - - completed_issues_count = ( - Issue.issue_objects.filter( - workspace__slug=slug, - assignees__in=[user_id], - state__group="completed", - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True - ) - .filter(**filters) - .count() - ) - - subscribed_issues_count = ( - IssueSubscriber.objects.filter( - workspace__slug=slug, - subscriber_id=user_id, - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True - ) - .filter(**filters) - .count() - ) - - upcoming_cycles = CycleIssue.objects.filter( - workspace__slug=slug, - cycle__start_date__gt=timezone.now().date(), - issue__assignees__in=[ - user_id, - ], 
- ).values("cycle__name", "cycle__id", "cycle__project_id") - - present_cycle = CycleIssue.objects.filter( - workspace__slug=slug, - cycle__start_date__lt=timezone.now().date(), - cycle__end_date__gt=timezone.now().date(), - issue__assignees__in=[ - user_id, - ], - ).values("cycle__name", "cycle__id", "cycle__project_id") - - return Response( - { - "state_distribution": state_distribution, - "priority_distribution": priority_distribution, - "created_issues": created_issues, - "assigned_issues": assigned_issues_count, - "completed_issues": completed_issues_count, - "pending_issues": pending_issues_count, - "subscribed_issues": subscribed_issues_count, - "present_cycles": present_cycle, - "upcoming_cycles": upcoming_cycles, - } - ) - - -class WorkspaceUserActivityEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get(self, request, slug, user_id): - projects = request.query_params.getlist("project", []) - - queryset = IssueActivity.objects.filter( - ~Q(field__in=["comment", "vote", "reaction", "draft"]), - workspace__slug=slug, - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True, - actor=user_id, - ).select_related("actor", "workspace", "issue", "project") - - if projects: - queryset = queryset.filter(project__in=projects) - - return self.paginate( - request=request, - queryset=queryset, - on_results=lambda issue_activities: IssueActivitySerializer( - issue_activities, many=True - ).data, - ) - - -class WorkspaceUserProfileEndpoint(BaseAPIView): - def get(self, request, slug, user_id): - user_data = User.objects.get(pk=user_id) - - requesting_workspace_member = WorkspaceMember.objects.get( - workspace__slug=slug, - member=request.user, - is_active=True, - ) - projects = [] - if requesting_workspace_member.role >= 10: - projects = ( - Project.objects.filter( - workspace__slug=slug, - project_projectmember__member=request.user, - project_projectmember__is_active=True, - ) - 
.annotate( - created_issues=Count( - "project_issue", - filter=Q( - project_issue__created_by_id=user_id, - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - assigned_issues=Count( - "project_issue", - filter=Q( - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - completed_issues=Count( - "project_issue", - filter=Q( - project_issue__completed_at__isnull=False, - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .annotate( - pending_issues=Count( - "project_issue", - filter=Q( - project_issue__state__group__in=[ - "backlog", - "unstarted", - "started", - ], - project_issue__assignees__in=[user_id], - project_issue__archived_at__isnull=True, - project_issue__is_draft=False, - ), - ) - ) - .values( - "id", - "name", - "identifier", - "emoji", - "icon_prop", - "created_issues", - "assigned_issues", - "completed_issues", - "pending_issues", - ) - ) - - return Response( - { - "project_data": projects, - "user_data": { - "email": user_data.email, - "first_name": user_data.first_name, - "last_name": user_data.last_name, - "avatar": user_data.avatar, - "cover_image": user_data.cover_image, - "date_joined": user_data.date_joined, - "user_timezone": user_data.user_timezone, - "display_name": user_data.display_name, - }, - }, - status=status.HTTP_200_OK, - ) - - -class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceViewerPermission, - ] - - def get(self, request, slug, user_id): - fields = [ - field - for field in request.GET.get("fields", "").split(",") - if field - ] - filters = issue_filters(request.query_params, "GET") - - # Custom ordering for priority and state - priority_order = ["urgent", "high", "medium", "low", "none"] - state_order = [ - "backlog", - "unstarted", - "started", - "completed", - "cancelled", 
- ] - - order_by_param = request.GET.get("order_by", "-created_at") - issue_queryset = ( - Issue.issue_objects.filter( - Q(assignees__in=[user_id]) - | Q(created_by_id=user_id) - | Q(issue_subscribers__subscriber_id=user_id), - workspace__slug=slug, - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True - ) - .filter(**filters) - .select_related("workspace", "project", "state", "parent") - .prefetch_related("assignees", "labels", "issue_module__module") - .annotate(cycle_id=F("issue_cycle__cycle_id")) - .annotate( - link_count=IssueLink.objects.filter(issue=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - attachment_count=IssueAttachment.objects.filter( - issue=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - sub_issues_count=Issue.issue_objects.filter( - parent=OuterRef("id") - ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - label_ids=Coalesce( - ArrayAgg( - "labels__id", - distinct=True, - filter=~Q(labels__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - assignee_ids=Coalesce( - ArrayAgg( - "assignees__id", - distinct=True, - filter=~Q(assignees__id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - module_ids=Coalesce( - ArrayAgg( - "issue_module__module_id", - distinct=True, - filter=~Q(issue_module__module_id__isnull=True), - ), - Value([], output_field=ArrayField(UUIDField())), - ), - ) - .order_by("created_at") - ).distinct() - - # Priority Ordering - if order_by_param == "priority" or order_by_param == "-priority": - priority_order = ( - priority_order - if order_by_param == "priority" - else priority_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - priority_order=Case( - *[ - When(priority=p, then=Value(i)) - for i, p in 
enumerate(priority_order) - ], - output_field=CharField(), - ) - ).order_by("priority_order") - - # State Ordering - elif order_by_param in [ - "state__name", - "state__group", - "-state__name", - "-state__group", - ]: - state_order = ( - state_order - if order_by_param in ["state__name", "state__group"] - else state_order[::-1] - ) - issue_queryset = issue_queryset.annotate( - state_order=Case( - *[ - When(state__group=state_group, then=Value(i)) - for i, state_group in enumerate(state_order) - ], - default=Value(len(state_order)), - output_field=CharField(), - ) - ).order_by("state_order") - # assignee and label ordering - elif order_by_param in [ - "labels__name", - "-labels__name", - "assignees__first_name", - "-assignees__first_name", - ]: - issue_queryset = issue_queryset.annotate( - max_values=Max( - order_by_param[1::] - if order_by_param.startswith("-") - else order_by_param - ) - ).order_by( - "-max_values" - if order_by_param.startswith("-") - else "max_values" - ) - else: - issue_queryset = issue_queryset.order_by(order_by_param) - - issues = IssueSerializer( - issue_queryset, many=True, fields=fields if fields else None - ).data - return Response(issues, status=status.HTTP_200_OK) - - -class WorkspaceLabelsEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceViewerPermission, - ] - - def get(self, request, slug): - labels = Label.objects.filter( - workspace__slug=slug, - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True - ) - serializer = LabelSerializer(labels, many=True).data - return Response(serializer, status=status.HTTP_200_OK) - - -class WorkspaceStatesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get(self, request, slug): - states = State.objects.filter( - workspace__slug=slug, - project__project_projectmember__member=request.user, - project__project_projectmember__is_active=True - ) - serializer = StateSerializer(states, many=True).data - 
return Response(serializer, status=status.HTTP_200_OK) - - -class WorkspaceEstimatesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceEntityPermission, - ] - - def get(self, request, slug): - estimate_ids = Project.objects.filter( - workspace__slug=slug, estimate__isnull=False - ).values_list("estimate_id", flat=True) - estimates = Estimate.objects.filter( - pk__in=estimate_ids - ).prefetch_related( - Prefetch( - "points", - queryset=EstimatePoint.objects.select_related( - "estimate", "workspace", "project" - ), - ) - ) - serializer = WorkspaceEstimateSerializer(estimates, many=True) - return Response(serializer.data, status=status.HTTP_200_OK) - - -class WorkspaceModulesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceViewerPermission, - ] - - def get(self, request, slug): - modules = ( - Module.objects.filter(workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("lead") - .prefetch_related("members") - .prefetch_related( - Prefetch( - "link_module", - queryset=ModuleLink.objects.select_related( - "module", "created_by" - ), - ) - ) - .annotate( - total_issues=Count( - "issue_module", - filter=Q( - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), - ), - ) - .annotate( - completed_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="completed", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), - ) - ) - .annotate( - cancelled_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="cancelled", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), - ) - ) - .annotate( - started_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="started", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), - ) - ) - 
.annotate( - unstarted_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="unstarted", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), - ) - ) - .annotate( - backlog_issues=Count( - "issue_module__issue__state__group", - filter=Q( - issue_module__issue__state__group="backlog", - issue_module__issue__archived_at__isnull=True, - issue_module__issue__is_draft=False, - ), - ) - ) - .order_by(self.kwargs.get("order_by", "-created_at")) - ) - - serializer = ModuleSerializer(modules, many=True).data - return Response(serializer, status=status.HTTP_200_OK) - - -class WorkspaceCyclesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceViewerPermission, - ] - - def get(self, request, slug): - cycles = ( - Cycle.objects.filter(workspace__slug=slug) - .select_related("project") - .select_related("workspace") - .select_related("owned_by") - .annotate( - total_issues=Count( - "issue_cycle", - filter=Q( - issue_cycle__issue__archived_at__isnull=True, - issue_cycle__issue__is_draft=False, - ), - ) - ) - .annotate( - completed_issues=Count( - "issue_cycle__issue__state__group", - filter=Q( - issue_cycle__issue__state__group="completed", - issue_cycle__issue__archived_at__isnull=True, - issue_cycle__issue__is_draft=False, - ), - ) - ) - .annotate( - cancelled_issues=Count( - "issue_cycle__issue__state__group", - filter=Q( - issue_cycle__issue__state__group="cancelled", - issue_cycle__issue__archived_at__isnull=True, - issue_cycle__issue__is_draft=False, - ), - ) - ) - .annotate( - started_issues=Count( - "issue_cycle__issue__state__group", - filter=Q( - issue_cycle__issue__state__group="started", - issue_cycle__issue__archived_at__isnull=True, - issue_cycle__issue__is_draft=False, - ), - ) - ) - .annotate( - unstarted_issues=Count( - "issue_cycle__issue__state__group", - filter=Q( - issue_cycle__issue__state__group="unstarted", - issue_cycle__issue__archived_at__isnull=True, - 
issue_cycle__issue__is_draft=False, - ), - ) - ) - .annotate( - backlog_issues=Count( - "issue_cycle__issue__state__group", - filter=Q( - issue_cycle__issue__state__group="backlog", - issue_cycle__issue__archived_at__isnull=True, - issue_cycle__issue__is_draft=False, - ), - ) - ) - .annotate( - total_estimates=Sum("issue_cycle__issue__estimate_point") - ) - .annotate( - completed_estimates=Sum( - "issue_cycle__issue__estimate_point", - filter=Q( - issue_cycle__issue__state__group="completed", - issue_cycle__issue__archived_at__isnull=True, - issue_cycle__issue__is_draft=False, - ), - ) - ) - .annotate( - started_estimates=Sum( - "issue_cycle__issue__estimate_point", - filter=Q( - issue_cycle__issue__state__group="started", - issue_cycle__issue__archived_at__isnull=True, - issue_cycle__issue__is_draft=False, - ), - ) - ) - .order_by(self.kwargs.get("order_by", "-created_at")) - .distinct() - ) - serializer = CycleSerializer(cycles, many=True).data - return Response(serializer, status=status.HTTP_200_OK) - - -class WorkspaceUserPropertiesEndpoint(BaseAPIView): - permission_classes = [ - WorkspaceViewerPermission, - ] - - def patch(self, request, slug): - workspace_properties = WorkspaceUserProperties.objects.get( - user=request.user, - workspace__slug=slug, - ) - - workspace_properties.filters = request.data.get( - "filters", workspace_properties.filters - ) - workspace_properties.display_filters = request.data.get( - "display_filters", workspace_properties.display_filters - ) - workspace_properties.display_properties = request.data.get( - "display_properties", workspace_properties.display_properties - ) - workspace_properties.save() - - serializer = WorkspaceUserPropertiesSerializer(workspace_properties) - return Response(serializer.data, status=status.HTTP_201_CREATED) - - def get(self, request, slug): - ( - workspace_properties, - _, - ) = WorkspaceUserProperties.objects.get_or_create( - user=request.user, workspace__slug=slug - ) - serializer = 
WorkspaceUserPropertiesSerializer(workspace_properties) - return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/workspace/base.py b/apiserver/plane/app/views/workspace/base.py new file mode 100644 index 00000000000..45966629180 --- /dev/null +++ b/apiserver/plane/app/views/workspace/base.py @@ -0,0 +1,418 @@ +# Python imports +import csv +import io +from datetime import date + +from dateutil.relativedelta import relativedelta +from django.db import IntegrityError +from django.db.models import ( + Count, + F, + Func, + OuterRef, + Prefetch, + Q, +) +from django.db.models.fields import DateField +from django.db.models.functions import Cast, ExtractDay, ExtractWeek + +# Django imports +from django.http import HttpResponse +from django.utils import timezone + +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +from plane.app.permissions import ( + WorkSpaceAdminPermission, + WorkSpaceBasePermission, + WorkspaceEntityPermission, +) + +# Module imports +from plane.app.serializers import ( + WorkSpaceSerializer, + WorkspaceThemeSerializer, +) +from plane.app.views.base import BaseAPIView, BaseViewSet +from plane.db.models import ( + Issue, + IssueActivity, + Workspace, + WorkspaceMember, + WorkspaceTheme, +) +from plane.utils.cache import cache_response, invalidate_cache + + +class WorkSpaceViewSet(BaseViewSet): + model = Workspace + serializer_class = WorkSpaceSerializer + permission_classes = [ + WorkSpaceBasePermission, + ] + + search_fields = [ + "name", + ] + filterset_fields = [ + "owner", + ] + + lookup_field = "slug" + + def get_queryset(self): + member_count = ( + WorkspaceMember.objects.filter( + workspace=OuterRef("id"), + member__is_bot=False, + is_active=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + + issue_count = ( + Issue.issue_objects.filter(workspace=OuterRef("id")) + .order_by() + 
.annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + return ( + self.filter_queryset( + super().get_queryset().select_related("owner") + ) + .order_by("name") + .filter( + workspace_member__member=self.request.user, + workspace_member__is_active=True, + ) + .annotate(total_members=member_count) + .annotate(total_issues=issue_count) + .select_related("owner") + ) + + @invalidate_cache(path="/api/workspaces/", user=False) + @invalidate_cache(path="/api/users/me/workspaces/") + def create(self, request): + try: + serializer = WorkSpaceSerializer(data=request.data) + + slug = request.data.get("slug", False) + name = request.data.get("name", False) + + if not name or not slug: + return Response( + {"error": "Both name and slug are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if len(name) > 80 or len(slug) > 48: + return Response( + { + "error": "The maximum length for name is 80 and for slug is 48" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if serializer.is_valid(): + serializer.save(owner=request.user) + # Create Workspace member + _ = WorkspaceMember.objects.create( + workspace_id=serializer.data["id"], + member=request.user, + role=20, + company_role=request.data.get("company_role", ""), + ) + return Response( + serializer.data, status=status.HTTP_201_CREATED + ) + return Response( + [serializer.errors[error][0] for error in serializer.errors], + status=status.HTTP_400_BAD_REQUEST, + ) + + except IntegrityError as e: + if "already exists" in str(e): + return Response( + {"slug": "The workspace with the slug already exists"}, + status=status.HTTP_410_GONE, + ) + + @cache_response(60 * 60 * 2) + def list(self, request, *args, **kwargs): + return super().list(request, *args, **kwargs) + + @invalidate_cache(path="/api/workspaces/", user=False) + @invalidate_cache(path="/api/users/me/workspaces/") + def partial_update(self, request, *args, **kwargs): + return super().partial_update(request, *args, **kwargs) + + 
@invalidate_cache(path="/api/workspaces/", user=False) + @invalidate_cache(path="/api/users/me/workspaces/") + @invalidate_cache(path="/api/users/me/settings/") + def destroy(self, request, *args, **kwargs): + return super().destroy(request, *args, **kwargs) + + +class UserWorkSpacesEndpoint(BaseAPIView): + search_fields = [ + "name", + ] + filterset_fields = [ + "owner", + ] + + @cache_response(60 * 60 * 2) + def get(self, request): + fields = [ + field + for field in request.GET.get("fields", "").split(",") + if field + ] + member_count = ( + WorkspaceMember.objects.filter( + workspace=OuterRef("id"), + member__is_bot=False, + is_active=True, + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + + issue_count = ( + Issue.issue_objects.filter(workspace=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + + workspace = ( + Workspace.objects.prefetch_related( + Prefetch( + "workspace_member", + queryset=WorkspaceMember.objects.filter( + member=request.user, is_active=True + ), + ) + ) + .select_related("owner") + .annotate(total_members=member_count) + .annotate(total_issues=issue_count) + .filter( + workspace_member__member=request.user, + workspace_member__is_active=True, + ) + .distinct() + ) + workspaces = WorkSpaceSerializer( + self.filter_queryset(workspace), + fields=fields if fields else None, + many=True, + ).data + return Response(workspaces, status=status.HTTP_200_OK) + + +class WorkSpaceAvailabilityCheckEndpoint(BaseAPIView): + def get(self, request): + slug = request.GET.get("slug", False) + + if not slug or slug == "": + return Response( + {"error": "Workspace Slug is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.filter(slug=slug).exists() + return Response({"status": not workspace}, status=status.HTTP_200_OK) + + +class WeekInMonth(Func): + function = "FLOOR" + template = "(((%(expressions)s - 1) / 7) + 1)::INTEGER" + 
+ +class UserWorkspaceDashboardEndpoint(BaseAPIView): + def get(self, request, slug): + issue_activities = ( + IssueActivity.objects.filter( + actor=request.user, + workspace__slug=slug, + created_at__date__gte=date.today() + relativedelta(months=-3), + ) + .annotate(created_date=Cast("created_at", DateField())) + .values("created_date") + .annotate(activity_count=Count("created_date")) + .order_by("created_date") + ) + + month = request.GET.get("month", 1) + + completed_issues = ( + Issue.issue_objects.filter( + assignees__in=[request.user], + workspace__slug=slug, + completed_at__month=month, + completed_at__isnull=False, + ) + .annotate(day_of_month=ExtractDay("completed_at")) + .annotate(week_in_month=WeekInMonth(F("day_of_month"))) + .values("week_in_month") + .annotate(completed_count=Count("id")) + .order_by("week_in_month") + ) + + assigned_issues = Issue.issue_objects.filter( + workspace__slug=slug, assignees__in=[request.user] + ).count() + + pending_issues_count = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[request.user], + ).count() + + completed_issues_count = Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[request.user], + state__group="completed", + ).count() + + issues_due_week = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[request.user], + ) + .annotate(target_week=ExtractWeek("target_date")) + .filter(target_week=timezone.now().date().isocalendar()[1]) + .count() + ) + + state_distribution = ( + Issue.issue_objects.filter( + workspace__slug=slug, assignees__in=[request.user] + ) + .annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + overdue_issues = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[request.user], + target_date__lt=timezone.now(), + 
completed_at__isnull=True, + ).values("id", "name", "workspace__slug", "project_id", "target_date") + + upcoming_issues = Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + start_date__gte=timezone.now(), + workspace__slug=slug, + assignees__in=[request.user], + completed_at__isnull=True, + ).values("id", "name", "workspace__slug", "project_id", "start_date") + + return Response( + { + "issue_activities": issue_activities, + "completed_issues": completed_issues, + "assigned_issues_count": assigned_issues, + "pending_issues_count": pending_issues_count, + "completed_issues_count": completed_issues_count, + "issues_due_week_count": issues_due_week, + "state_distribution": state_distribution, + "overdue_issues": overdue_issues, + "upcoming_issues": upcoming_issues, + }, + status=status.HTTP_200_OK, + ) + + +class WorkspaceThemeViewSet(BaseViewSet): + permission_classes = [ + WorkSpaceAdminPermission, + ] + model = WorkspaceTheme + serializer_class = WorkspaceThemeSerializer + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + ) + + def create(self, request, slug): + workspace = Workspace.objects.get(slug=slug) + serializer = WorkspaceThemeSerializer(data=request.data) + if serializer.is_valid(): + serializer.save(workspace=workspace, actor=request.user) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class ExportWorkspaceUserActivityEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceEntityPermission, + ] + + def generate_csv_from_rows(self, rows): + """Generate CSV buffer from rows.""" + csv_buffer = io.StringIO() + writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) + [writer.writerow(row) for row in rows] + csv_buffer.seek(0) + return csv_buffer + + def post(self, request, slug, user_id): + + if not request.data.get("date"): + return Response( + 
{"error": "Date is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + user_activities = IssueActivity.objects.filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + workspace__slug=slug, + created_at__date=request.data.get("date"), + project__project_projectmember__member=request.user, + actor_id=user_id, + ).select_related("actor", "workspace", "issue", "project")[:10000] + + header = [ + "Actor name", + "Issue ID", + "Project", + "Created at", + "Updated at", + "Action", + "Field", + "Old value", + "New value", + ] + rows = [ + ( + activity.actor.display_name, + f"{activity.project.identifier} - {activity.issue.sequence_id if activity.issue else ''}", + activity.project.name, + activity.created_at, + activity.updated_at, + activity.verb, + activity.field, + activity.old_value, + activity.new_value, + ) + for activity in user_activities + ] + csv_buffer = self.generate_csv_from_rows([header] + rows) + response = HttpResponse(csv_buffer.getvalue(), content_type="text/csv") + response["Content-Disposition"] = ( + 'attachment; filename="workspace-user-activity.csv"' + ) + return response diff --git a/apiserver/plane/app/views/workspace/cycle.py b/apiserver/plane/app/views/workspace/cycle.py new file mode 100644 index 00000000000..ea081cf9930 --- /dev/null +++ b/apiserver/plane/app/views/workspace/cycle.py @@ -0,0 +1,116 @@ +# Django imports +from django.db.models import ( + Q, + Count, + Sum, +) + +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.app.views.base import BaseAPIView +from plane.db.models import Cycle +from plane.app.permissions import WorkspaceViewerPermission +from plane.app.serializers.cycle import CycleSerializer + + +class WorkspaceCyclesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceViewerPermission, + ] + + def get(self, request, slug): + cycles = ( + Cycle.objects.filter(workspace__slug=slug) + .select_related("project") + 
.select_related("workspace") + .select_related("owned_by") + .annotate( + total_issues=Count( + "issue_cycle", + filter=Q( + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + cancelled_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="cancelled", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + unstarted_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="unstarted", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + backlog_issues=Count( + "issue_cycle__issue__state__group", + filter=Q( + issue_cycle__issue__state__group="backlog", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + total_estimates=Sum("issue_cycle__issue__estimate_point") + ) + .annotate( + completed_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="completed", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .annotate( + started_estimates=Sum( + "issue_cycle__issue__estimate_point", + filter=Q( + issue_cycle__issue__state__group="started", + issue_cycle__issue__archived_at__isnull=True, + issue_cycle__issue__is_draft=False, + ), + ) + ) + .order_by(self.kwargs.get("order_by", 
"-created_at")) + .distinct() + ) + serializer = CycleSerializer(cycles, many=True).data + return Response(serializer, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/workspace/estimate.py b/apiserver/plane/app/views/workspace/estimate.py new file mode 100644 index 00000000000..59a23d86778 --- /dev/null +++ b/apiserver/plane/app/views/workspace/estimate.py @@ -0,0 +1,30 @@ +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.app.permissions import WorkspaceEntityPermission +from plane.app.serializers import WorkspaceEstimateSerializer +from plane.app.views.base import BaseAPIView +from plane.db.models import Estimate, Project +from plane.utils.cache import cache_response + + +class WorkspaceEstimatesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceEntityPermission, + ] + + @cache_response(60 * 60 * 2) + def get(self, request, slug): + estimate_ids = Project.objects.filter( + workspace__slug=slug, estimate__isnull=False + ).values_list("estimate_id", flat=True) + estimates = ( + Estimate.objects.filter(pk__in=estimate_ids, workspace__slug=slug) + .prefetch_related("points") + .select_related("workspace", "project") + ) + + serializer = WorkspaceEstimateSerializer(estimates, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/workspace/invite.py b/apiserver/plane/app/views/workspace/invite.py new file mode 100644 index 00000000000..807c060ad21 --- /dev/null +++ b/apiserver/plane/app/views/workspace/invite.py @@ -0,0 +1,301 @@ +# Python imports +import jwt +from datetime import datetime + +# Django imports +from django.conf import settings +from django.utils import timezone +from django.db.models import Count +from django.core.exceptions import ValidationError +from django.core.validators import validate_email + +# Third party modules +from rest_framework import status +from rest_framework.response 
import Response +from rest_framework.permissions import AllowAny + +# Module imports +from plane.app.serializers import ( + WorkSpaceMemberSerializer, + WorkSpaceMemberInviteSerializer, +) +from plane.app.views.base import BaseAPIView +from .. import BaseViewSet +from plane.db.models import ( + User, + Workspace, + WorkspaceMemberInvite, + WorkspaceMember, +) +from plane.app.permissions import WorkSpaceAdminPermission +from plane.bgtasks.workspace_invitation_task import workspace_invitation +from plane.bgtasks.event_tracking_task import workspace_invite_event +from plane.utils.cache import invalidate_cache + +class WorkspaceInvitationsViewset(BaseViewSet): + """Endpoint for creating, listing and deleting workspace invitations""" + + serializer_class = WorkSpaceMemberInviteSerializer + model = WorkspaceMemberInvite + + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "workspace__owner", "created_by") + ) + + def create(self, request, slug): + emails = request.data.get("emails", []) + # Check if email is provided + if not emails: + return Response( + {"error": "Emails are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # check for role level of the requesting user + requesting_user = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + + # Check if any invited user has a higher role + if len( + [ + email + for email in emails + if int(email.get("role", 10)) > requesting_user.role + ] + ): + return Response( + {"error": "You cannot invite a user with higher role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the workspace object + workspace = Workspace.objects.get(slug=slug) + + # Check if user is already a member of workspace + workspace_members = WorkspaceMember.objects.filter( + workspace_id=workspace.id, + 
member__email__in=[email.get("email") for email in emails], + is_active=True, + ).select_related("member", "workspace", "workspace__owner") + + if workspace_members: + return Response( + { + "error": "Some users are already member of workspace", + "workspace_users": WorkSpaceMemberSerializer( + workspace_members, many=True + ).data, + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace_invitations = [] + for email in emails: + try: + validate_email(email.get("email")) + workspace_invitations.append( + WorkspaceMemberInvite( + email=email.get("email").strip().lower(), + workspace_id=workspace.id, + token=jwt.encode( + { + "email": email, + "timestamp": datetime.now().timestamp(), + }, + settings.SECRET_KEY, + algorithm="HS256", + ), + role=email.get("role", 10), + created_by=request.user, + ) + ) + except ValidationError: + return Response( + { + "error": f"Invalid email - {email} provided a valid email address is required to send the invite" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + # Create workspace member invite + workspace_invitations = WorkspaceMemberInvite.objects.bulk_create( + workspace_invitations, batch_size=10, ignore_conflicts=True + ) + + current_site = request.META.get("HTTP_ORIGIN") + + # Send invitations + for invitation in workspace_invitations: + workspace_invitation.delay( + invitation.email, + workspace.id, + invitation.token, + current_site, + request.user.email, + ) + + return Response( + { + "message": "Emails sent successfully", + }, + status=status.HTTP_200_OK, + ) + + def destroy(self, request, slug, pk): + workspace_member_invite = WorkspaceMemberInvite.objects.get( + pk=pk, workspace__slug=slug + ) + workspace_member_invite.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorkspaceJoinEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + """Invitation response endpoint the user can respond to the invitation""" + + @invalidate_cache(path="/api/workspaces/", user=False) + 
@invalidate_cache(path="/api/users/me/workspaces/") + def post(self, request, slug, pk): + workspace_invite = WorkspaceMemberInvite.objects.get( + pk=pk, workspace__slug=slug + ) + + email = request.data.get("email", "") + + # Check the email + if email == "" or workspace_invite.email != email: + return Response( + {"error": "You do not have permission to join the workspace"}, + status=status.HTTP_403_FORBIDDEN, + ) + + # Proceed only if the invitation has not already been responded to + if workspace_invite.responded_at is None: + workspace_invite.accepted = request.data.get("accepted", False) + workspace_invite.responded_at = timezone.now() + workspace_invite.save() + + if workspace_invite.accepted: + # Check if the user created account after invitation + user = User.objects.filter(email=email).first() + + # If the user is present then create the workspace member + if user is not None: + # Check if the user was already a member of workspace then activate the user + workspace_member = WorkspaceMember.objects.filter( + workspace=workspace_invite.workspace, member=user + ).first() + if workspace_member is not None: + workspace_member.is_active = True + workspace_member.role = workspace_invite.role + workspace_member.save() + else: + # Create a workspace member + _ = WorkspaceMember.objects.create( + workspace=workspace_invite.workspace, + member=user, + role=workspace_invite.role, + ) + + # Set the user last_workspace_id to the accepted workspace + user.last_workspace_id = workspace_invite.workspace.id + user.save() + + # Delete the invitation + workspace_invite.delete() + + # Send event + workspace_invite_event.delay( + user=user.id if user is not None else None, + email=email, + user_agent=request.META.get("HTTP_USER_AGENT"), + ip=request.META.get("REMOTE_ADDR"), + event_name="MEMBER_ACCEPTED", + accepted_from="EMAIL", + ) + + return Response( + {"message": "Workspace Invitation Accepted"}, + status=status.HTTP_200_OK, + ) + + # Workspace invitation rejected + return Response( + {"message": 
"Workspace Invitation was not accepted"}, + status=status.HTTP_200_OK, + ) + + return Response( + {"error": "You have already responded to the invitation request"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug, pk): + workspace_invitation = WorkspaceMemberInvite.objects.get( + workspace__slug=slug, pk=pk + ) + serializer = WorkSpaceMemberInviteSerializer(workspace_invitation) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class UserWorkspaceInvitationsViewSet(BaseViewSet): + serializer_class = WorkSpaceMemberInviteSerializer + model = WorkspaceMemberInvite + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(email=self.request.user.email) + .select_related("workspace", "workspace__owner", "created_by") + .annotate(total_members=Count("workspace__workspace_member")) + ) + + @invalidate_cache(path="/api/workspaces/", user=False) + @invalidate_cache(path="/api/users/me/workspaces/") + @invalidate_cache( + path="/api/workspaces/:slug/members/", url_params=True, user=False + ) + def create(self, request): + invitations = request.data.get("invitations", []) + workspace_invitations = WorkspaceMemberInvite.objects.filter( + pk__in=invitations, email=request.user.email + ).order_by("-created_at") + + # If the user is already a member of workspace and was deactivated then activate the user + for invitation in workspace_invitations: + # Update the WorkspaceMember for this specific invitation + WorkspaceMember.objects.filter( + workspace_id=invitation.workspace_id, member=request.user + ).update(is_active=True, role=invitation.role) + + # Bulk create the user for all the workspaces + WorkspaceMember.objects.bulk_create( + [ + WorkspaceMember( + workspace=invitation.workspace, + member=request.user, + role=invitation.role, + created_by=request.user, + ) + for invitation in workspace_invitations + ], + ignore_conflicts=True, + ) + + # Delete joined workspace invites + 
workspace_invitations.delete() + + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/workspace/label.py b/apiserver/plane/app/views/workspace/label.py new file mode 100644 index 00000000000..328f3f8c132 --- /dev/null +++ b/apiserver/plane/app/views/workspace/label.py @@ -0,0 +1,26 @@ +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.app.serializers import LabelSerializer +from plane.app.views.base import BaseAPIView +from plane.db.models import Label +from plane.app.permissions import WorkspaceViewerPermission +from plane.utils.cache import cache_response + +class WorkspaceLabelsEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceViewerPermission, + ] + + @cache_response(60 * 60 * 2) + def get(self, request, slug): + labels = Label.objects.filter( + workspace__slug=slug, + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + serializer = LabelSerializer(labels, many=True).data + return Response(serializer, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/workspace/member.py b/apiserver/plane/app/views/workspace/member.py new file mode 100644 index 00000000000..5afe3714428 --- /dev/null +++ b/apiserver/plane/app/views/workspace/member.py @@ -0,0 +1,400 @@ +# Django imports +from django.db.models import ( + CharField, + Count, + Q, +) +from django.db.models.functions import Cast + +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +from plane.app.permissions import ( + WorkSpaceAdminPermission, + WorkspaceEntityPermission, + WorkspaceUserPermission, +) + +# Module imports +from plane.app.serializers import ( + ProjectMemberRoleSerializer, + TeamSerializer, + UserLiteSerializer, + WorkspaceMemberAdminSerializer, + WorkspaceMemberMeSerializer, + WorkSpaceMemberSerializer, +) 
+from plane.app.views.base import BaseAPIView +from plane.db.models import ( + Project, + ProjectMember, + Team, + User, + Workspace, + WorkspaceMember, +) +from plane.utils.cache import cache_response, invalidate_cache + +from .. import BaseViewSet + + +class WorkSpaceMemberViewSet(BaseViewSet): + serializer_class = WorkspaceMemberAdminSerializer + model = WorkspaceMember + + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get_permissions(self): + if self.action == "leave": + self.permission_classes = [ + WorkspaceUserPermission, + ] + else: + self.permission_classes = [ + WorkspaceEntityPermission, + ] + + return super(WorkSpaceMemberViewSet, self).get_permissions() + + search_fields = [ + "member__display_name", + "member__first_name", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + is_active=True, + ) + .select_related("workspace", "workspace__owner") + .select_related("member") + ) + + @cache_response(60 * 60 * 2) + def list(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + member=request.user, + workspace__slug=slug, + is_active=True, + ) + + # Get all active workspace members + workspace_members = self.get_queryset() + + if workspace_member.role > 10: + serializer = WorkspaceMemberAdminSerializer( + workspace_members, + fields=("id", "member", "role"), + many=True, + ) + else: + serializer = WorkSpaceMemberSerializer( + workspace_members, + fields=("id", "member", "role"), + many=True, + ) + return Response(serializer.data, status=status.HTTP_200_OK) + + @invalidate_cache( + path="/api/workspaces/:slug/members/", url_params=True, user=False + ) + def partial_update(self, request, slug, pk): + workspace_member = WorkspaceMember.objects.get( + pk=pk, + workspace__slug=slug, + member__is_bot=False, + is_active=True, + ) + if request.user.id == workspace_member.member_id: + return Response( + {"error": "You cannot update your 
own role"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the requested user role + requested_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + # Check if role is being updated + # One cannot update role higher than his own role + if ( + "role" in request.data + and int(request.data.get("role", workspace_member.role)) + > requested_workspace_member.role + ): + return Response( + { + "error": "You cannot update a role that is higher than your own role" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = WorkSpaceMemberSerializer( + workspace_member, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + @invalidate_cache( + path="/api/workspaces/:slug/members/", url_params=True, user=False + ) + @invalidate_cache(path="/api/users/me/settings/") + def destroy(self, request, slug, pk): + # Check the user role who is deleting the user + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + pk=pk, + member__is_bot=False, + is_active=True, + ) + + # check requesting user role + requesting_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + + if str(workspace_member.id) == str(requesting_workspace_member.id): + return Response( + { + "error": "You cannot remove yourself from the workspace. 
Please use leave workspace" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if requesting_workspace_member.role < workspace_member.role: + return Response( + { + "error": "You cannot remove a user having role higher than you" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if ( + Project.objects.annotate( + total_members=Count("project_projectmember"), + member_with_role=Count( + "project_projectmember", + filter=Q( + project_projectmember__member_id=workspace_member.id, + project_projectmember__role=20, + ), + ), + ) + .filter(total_members=1, member_with_role=1, workspace__slug=slug) + .exists() + ): + return Response( + { + "error": "User is a part of some projects where they are the only admin, they should either leave that project or promote another user to admin." + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Deactivate the users from the projects where the user is part of + _ = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=workspace_member.member_id, + is_active=True, + ).update(is_active=False) + + workspace_member.is_active = False + workspace_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + @invalidate_cache( + path="/api/workspaces/:slug/members/", url_params=True, user=False + ) + @invalidate_cache(path="/api/users/me/settings/") + def leave(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + + # Check if the leaving user is the only admin of the workspace + if ( + workspace_member.role == 20 + and not WorkspaceMember.objects.filter( + workspace__slug=slug, + role=20, + is_active=True, + ).count() + > 1 + ): + return Response( + { + "error": "You cannot leave the workspace as you are the only admin of the workspace you will have to either delete the workspace or promote another user to admin." 
+ }, + status=status.HTTP_400_BAD_REQUEST, + ) + + if ( + Project.objects.annotate( + total_members=Count("project_projectmember"), + member_with_role=Count( + "project_projectmember", + filter=Q( + project_projectmember__member_id=request.user.id, + project_projectmember__role=20, + ), + ), + ) + .filter(total_members=1, member_with_role=1, workspace__slug=slug) + .exists() + ): + return Response( + { + "error": "You are a part of some projects where you are the only admin, you should either leave the project or promote another user to admin." + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # # Deactivate the users from the projects where the user is part of + _ = ProjectMember.objects.filter( + workspace__slug=slug, + member_id=workspace_member.member_id, + is_active=True, + ).update(is_active=False) + + # # Deactivate the user + workspace_member.is_active = False + workspace_member.save() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorkspaceMemberUserViewsEndpoint(BaseAPIView): + def post(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + workspace_member.view_props = request.data.get("view_props", {}) + workspace_member.save() + + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorkspaceMemberUserEndpoint(BaseAPIView): + def get(self, request, slug): + workspace_member = WorkspaceMember.objects.get( + member=request.user, + workspace__slug=slug, + is_active=True, + ) + serializer = WorkspaceMemberMeSerializer(workspace_member) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class WorkspaceProjectMemberEndpoint(BaseAPIView): + serializer_class = ProjectMemberRoleSerializer + model = ProjectMember + + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get(self, request, slug): + # Fetch all project IDs where the user is involved + project_ids = ( + ProjectMember.objects.filter( + member=request.user, + 
is_active=True, + ) + .values_list("project_id", flat=True) + .distinct() + ) + + # Get all the project members in which the user is involved + project_members = ProjectMember.objects.filter( + workspace__slug=slug, + project_id__in=project_ids, + is_active=True, + ).select_related("project", "member", "workspace") + project_members = ProjectMemberRoleSerializer( + project_members, many=True + ).data + + project_members_dict = dict() + + # Construct a dictionary with project_id as key and project_members as value + for project_member in project_members: + project_id = project_member.pop("project") + if str(project_id) not in project_members_dict: + project_members_dict[str(project_id)] = [] + project_members_dict[str(project_id)].append(project_member) + + return Response(project_members_dict, status=status.HTTP_200_OK) + + +class TeamMemberViewSet(BaseViewSet): + serializer_class = TeamSerializer + model = Team + permission_classes = [ + WorkSpaceAdminPermission, + ] + + search_fields = [ + "member__display_name", + "member__first_name", + ] + + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("workspace", "workspace__owner") + .prefetch_related("members") + ) + + def create(self, request, slug): + members = list( + WorkspaceMember.objects.filter( + workspace__slug=slug, + member__id__in=request.data.get("members", []), + is_active=True, + ) + .annotate(member_str_id=Cast("member", output_field=CharField())) + .distinct() + .values_list("member_str_id", flat=True) + ) + + if len(members) != len(request.data.get("members", [])): + users = list( + set(request.data.get("members", [])).difference(members) + ) + users = User.objects.filter(pk__in=users) + + serializer = UserLiteSerializer(users, many=True) + return Response( + { + "error": f"{len(users)} of the member(s) are not a part of the workspace", + "members": serializer.data, + }, + 
status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + serializer = TeamSerializer( + data=request.data, context={"workspace": workspace} + ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) diff --git a/apiserver/plane/app/views/workspace/module.py b/apiserver/plane/app/views/workspace/module.py new file mode 100644 index 00000000000..8dd5e97f489 --- /dev/null +++ b/apiserver/plane/app/views/workspace/module.py @@ -0,0 +1,110 @@ +# Django imports +from django.db.models import ( + Prefetch, + Q, + Count, +) + +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.app.views.base import BaseAPIView +from plane.db.models import ( + Module, + ModuleLink, +) +from plane.app.permissions import WorkspaceViewerPermission +from plane.app.serializers.module import ModuleSerializer + +class WorkspaceModulesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceViewerPermission, + ] + + def get(self, request, slug): + modules = ( + Module.objects.filter(workspace__slug=slug) + .select_related("project") + .select_related("workspace") + .select_related("lead") + .prefetch_related("members") + .prefetch_related( + Prefetch( + "link_module", + queryset=ModuleLink.objects.select_related( + "module", "created_by" + ), + ) + ) + .annotate( + total_issues=Count( + "issue_module", + filter=Q( + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ), + ) + .annotate( + completed_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="completed", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + cancelled_issues=Count( + 
"issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="cancelled", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + started_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="started", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + unstarted_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="unstarted", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .annotate( + backlog_issues=Count( + "issue_module__issue__state__group", + filter=Q( + issue_module__issue__state__group="backlog", + issue_module__issue__archived_at__isnull=True, + issue_module__issue__is_draft=False, + ), + distinct=True, + ) + ) + .order_by(self.kwargs.get("order_by", "-created_at")) + ) + + serializer = ModuleSerializer(modules, many=True).data + return Response(serializer, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/workspace/state.py b/apiserver/plane/app/views/workspace/state.py new file mode 100644 index 00000000000..7e3b158e504 --- /dev/null +++ b/apiserver/plane/app/views/workspace/state.py @@ -0,0 +1,26 @@ +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.app.serializers import StateSerializer +from plane.app.views.base import BaseAPIView +from plane.db.models import State +from plane.app.permissions import WorkspaceEntityPermission +from plane.utils.cache import cache_response + +class WorkspaceStatesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceEntityPermission, + ] + + @cache_response(60 * 60 * 2) + def get(self, request, slug): + states = State.objects.filter( + workspace__slug=slug, + 
project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + serializer = StateSerializer(states, many=True).data + return Response(serializer, status=status.HTTP_200_OK) diff --git a/apiserver/plane/app/views/workspace/user.py b/apiserver/plane/app/views/workspace/user.py new file mode 100644 index 00000000000..94a22a1a7f2 --- /dev/null +++ b/apiserver/plane/app/views/workspace/user.py @@ -0,0 +1,577 @@ +# Python imports +from datetime import date +from dateutil.relativedelta import relativedelta + +# Django imports +from django.utils import timezone +from django.db.models import ( + OuterRef, + Func, + F, + Q, + Count, + Case, + Value, + CharField, + When, + Max, + IntegerField, + UUIDField, +) +from django.db.models.functions import ExtractWeek, Cast +from django.db.models.fields import DateField +from django.contrib.postgres.aggregates import ArrayAgg +from django.contrib.postgres.fields import ArrayField +from django.db.models.functions import Coalesce + +# Third party modules +from rest_framework import status +from rest_framework.response import Response + +# Module imports +from plane.app.serializers import ( + WorkSpaceSerializer, + ProjectMemberSerializer, + IssueActivitySerializer, + IssueSerializer, + WorkspaceUserPropertiesSerializer, +) +from plane.app.views.base import BaseAPIView +from plane.db.models import ( + User, + Workspace, + ProjectMember, + IssueActivity, + Issue, + IssueLink, + IssueAttachment, + IssueSubscriber, + Project, + WorkspaceMember, + CycleIssue, + WorkspaceUserProperties, +) +from plane.app.permissions import ( + WorkspaceEntityPermission, + WorkspaceViewerPermission, +) +from plane.utils.issue_filters import issue_filters + + +class UserLastProjectWithWorkspaceEndpoint(BaseAPIView): + def get(self, request): + user = User.objects.get(pk=request.user.id) + + last_workspace_id = user.last_workspace_id + + if last_workspace_id is None: + return 
Response( + { + "project_details": [], + "workspace_details": {}, + }, + status=status.HTTP_200_OK, + ) + + workspace = Workspace.objects.get(pk=last_workspace_id) + workspace_serializer = WorkSpaceSerializer(workspace) + + project_member = ProjectMember.objects.filter( + workspace_id=last_workspace_id, member=request.user + ).select_related("workspace", "project", "member", "workspace__owner") + + project_member_serializer = ProjectMemberSerializer( + project_member, many=True + ) + + return Response( + { + "workspace_details": workspace_serializer.data, + "project_details": project_member_serializer.data, + }, + status=status.HTTP_200_OK, + ) + + +class WorkspaceUserProfileIssuesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceViewerPermission, + ] + + def get(self, request, slug, user_id): + fields = [ + field + for field in request.GET.get("fields", "").split(",") + if field + ] + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", "none"] + state_order = [ + "backlog", + "unstarted", + "started", + "completed", + "cancelled", + ] + + order_by_param = request.GET.get("order_by", "-created_at") + issue_queryset = ( + Issue.issue_objects.filter( + Q(assignees__in=[user_id]) + | Q(created_by_id=user_id) + | Q(issue_subscribers__subscriber_id=user_id), + workspace__slug=slug, + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + ) + .filter(**filters) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels", "issue_module__module") + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + 
.annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + sub_issues_count=Issue.issue_objects.filter( + parent=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + label_ids=Coalesce( + ArrayAgg( + "labels__id", + distinct=True, + filter=~Q(labels__id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + assignee_ids=Coalesce( + ArrayAgg( + "assignees__id", + distinct=True, + filter=~Q(assignees__id__isnull=True) + & Q(assignees__member_project__is_active=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + module_ids=Coalesce( + ArrayAgg( + "issue_module__module_id", + distinct=True, + filter=~Q(issue_module__module_id__isnull=True), + ), + Value([], output_field=ArrayField(UUIDField())), + ), + ) + .order_by("created_at") + ).distinct() + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = 
issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" + if order_by_param.startswith("-") + else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueSerializer( + issue_queryset, many=True, fields=fields if fields else None + ).data + return Response(issues, status=status.HTTP_200_OK) + + +class WorkspaceUserPropertiesEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceViewerPermission, + ] + + def patch(self, request, slug): + workspace_properties = WorkspaceUserProperties.objects.get( + user=request.user, + workspace__slug=slug, + ) + + workspace_properties.filters = request.data.get( + "filters", workspace_properties.filters + ) + workspace_properties.display_filters = request.data.get( + "display_filters", workspace_properties.display_filters + ) + workspace_properties.display_properties = request.data.get( + "display_properties", workspace_properties.display_properties + ) + workspace_properties.save() + + serializer = WorkspaceUserPropertiesSerializer(workspace_properties) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + def get(self, request, slug): + ( + workspace_properties, + _, + ) = WorkspaceUserProperties.objects.get_or_create( + user=request.user, workspace__slug=slug + ) + serializer = WorkspaceUserPropertiesSerializer(workspace_properties) + return Response(serializer.data, status=status.HTTP_200_OK) + + +class WorkspaceUserProfileEndpoint(BaseAPIView): + def get(self, request, slug, user_id): + user_data = User.objects.get(pk=user_id) + + requesting_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, + member=request.user, + is_active=True, + ) + projects = [] + if requesting_workspace_member.role >= 10: + projects = ( + Project.objects.filter( + workspace__slug=slug, + project_projectmember__member=request.user, + 
project_projectmember__is_active=True, + archived_at__isnull=True, + ) + .annotate( + created_issues=Count( + "project_issue", + filter=Q( + project_issue__created_by_id=user_id, + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + assigned_issues=Count( + "project_issue", + filter=Q( + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + completed_issues=Count( + "project_issue", + filter=Q( + project_issue__completed_at__isnull=False, + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .annotate( + pending_issues=Count( + "project_issue", + filter=Q( + project_issue__state__group__in=[ + "backlog", + "unstarted", + "started", + ], + project_issue__assignees__in=[user_id], + project_issue__archived_at__isnull=True, + project_issue__is_draft=False, + ), + ) + ) + .values( + "id", + "logo_props", + "created_issues", + "assigned_issues", + "completed_issues", + "pending_issues", + ) + ) + + return Response( + { + "project_data": projects, + "user_data": { + "email": user_data.email, + "first_name": user_data.first_name, + "last_name": user_data.last_name, + "avatar": user_data.avatar, + "cover_image": user_data.cover_image, + "date_joined": user_data.date_joined, + "user_timezone": user_data.user_timezone, + "display_name": user_data.display_name, + }, + }, + status=status.HTTP_200_OK, + ) + + +class WorkspaceUserActivityEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get(self, request, slug, user_id): + projects = request.query_params.getlist("project", []) + + queryset = IssueActivity.objects.filter( + ~Q(field__in=["comment", "vote", "reaction", "draft"]), + workspace__slug=slug, + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + 
project__archived_at__isnull=True, + actor=user_id, + ).select_related("actor", "workspace", "issue", "project") + + if projects: + queryset = queryset.filter(project__in=projects) + + return self.paginate( + request=request, + queryset=queryset, + on_results=lambda issue_activities: IssueActivitySerializer( + issue_activities, many=True + ).data, + ) + + +class WorkspaceUserProfileStatsEndpoint(BaseAPIView): + def get(self, request, slug, user_id): + filters = issue_filters(request.query_params, "GET") + + state_distribution = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + ) + .filter(**filters) + .annotate(state_group=F("state__group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") + ) + + priority_order = ["urgent", "high", "medium", "low", "none"] + + priority_distribution = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + ) + .filter(**filters) + .values("priority") + .annotate(priority_count=Count("priority")) + .filter(priority_count__gte=1) + .annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + default=Value(len(priority_order)), + output_field=IntegerField(), + ) + ) + .order_by("priority_order") + ) + + created_issues = ( + Issue.issue_objects.filter( + workspace__slug=slug, + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + created_by_id=user_id, + ) + .filter(**filters) + .count() + ) + + assigned_issues_count = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + ) + 
.filter(**filters) + .count() + ) + + pending_issues_count = ( + Issue.issue_objects.filter( + ~Q(state__group__in=["completed", "cancelled"]), + workspace__slug=slug, + assignees__in=[user_id], + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + ) + .filter(**filters) + .count() + ) + + completed_issues_count = ( + Issue.issue_objects.filter( + workspace__slug=slug, + assignees__in=[user_id], + state__group="completed", + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + ) + .filter(**filters) + .count() + ) + + subscribed_issues_count = ( + IssueSubscriber.objects.filter( + workspace__slug=slug, + subscriber_id=user_id, + project__project_projectmember__member=request.user, + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, + ) + .filter(**filters) + .count() + ) + + upcoming_cycles = CycleIssue.objects.filter( + workspace__slug=slug, + cycle__start_date__gt=timezone.now().date(), + issue__assignees__in=[ + user_id, + ], + ).values("cycle__name", "cycle__id", "cycle__project_id") + + present_cycle = CycleIssue.objects.filter( + workspace__slug=slug, + cycle__start_date__lt=timezone.now().date(), + cycle__end_date__gt=timezone.now().date(), + issue__assignees__in=[ + user_id, + ], + ).values("cycle__name", "cycle__id", "cycle__project_id") + + return Response( + { + "state_distribution": state_distribution, + "priority_distribution": priority_distribution, + "created_issues": created_issues, + "assigned_issues": assigned_issues_count, + "completed_issues": completed_issues_count, + "pending_issues": pending_issues_count, + "subscribed_issues": subscribed_issues_count, + "present_cycles": present_cycle, + "upcoming_cycles": upcoming_cycles, + } + ) + + +class UserActivityGraphEndpoint(BaseAPIView): + def get(self, request, slug): + issue_activities = ( + IssueActivity.objects.filter( + actor=request.user, + 
workspace__slug=slug, + created_at__date__gte=date.today() + relativedelta(months=-6), + ) + .annotate(created_date=Cast("created_at", DateField())) + .values("created_date") + .annotate(activity_count=Count("created_date")) + .order_by("created_date") + ) + + return Response(issue_activities, status=status.HTTP_200_OK) + + +class UserIssueCompletedGraphEndpoint(BaseAPIView): + def get(self, request, slug): + month = request.GET.get("month", 1) + + issues = ( + Issue.issue_objects.filter( + assignees__in=[request.user], + workspace__slug=slug, + completed_at__month=month, + completed_at__isnull=False, + ) + .annotate(completed_week=ExtractWeek("completed_at")) + .annotate(week=F("completed_week") % 4) + .values("week") + .annotate(completed_count=Count("completed_week")) + .order_by("week") + ) + + return Response(issues, status=status.HTTP_200_OK) diff --git a/apiserver/plane/bgtasks/analytic_plot_export.py b/apiserver/plane/bgtasks/analytic_plot_export.py index 7789562293f..e6788df79ab 100644 --- a/apiserver/plane/bgtasks/analytic_plot_export.py +++ b/apiserver/plane/bgtasks/analytic_plot_export.py @@ -1,24 +1,22 @@ # Python imports import csv import io -import requests -import json +import logging + +# Third party imports +from celery import shared_task # Django imports from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags -from django.conf import settings - -# Third party imports -from celery import shared_task -from sentry_sdk import capture_exception # Module imports from plane.db.models import Issue +from plane.license.utils.instance_value import get_email_configuration from plane.utils.analytics_plot import build_graph_plot +from plane.utils.exception_logger import log_exception from plane.utils.issue_filters import issue_filters -from plane.license.utils.instance_value import get_email_configuration row_mapping = { "state__name": "State", @@ -57,6 
+55,7 @@ def send_export_email(email, slug, csv_buffer, rows): EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_TLS, + EMAIL_USE_SSL, EMAIL_FROM, ) = get_email_configuration() @@ -66,6 +65,7 @@ def send_export_email(email, slug, csv_buffer, rows): username=EMAIL_HOST_USER, password=EMAIL_HOST_PASSWORD, use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", ) msg = EmailMultiAlternatives( @@ -212,9 +212,9 @@ def generate_segmented_rows( None, ) if assignee: - generated_row[ - 0 - ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + generated_row[0] = ( + f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + ) if x_axis == LABEL_ID: label = next( @@ -281,9 +281,9 @@ def generate_segmented_rows( None, ) if assignee: - row_zero[ - index + 2 - ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + row_zero[index + 2] = ( + f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + ) if segmented == LABEL_ID: for index, segm in enumerate(row_zero[2:]): @@ -368,9 +368,9 @@ def generate_non_segmented_rows( None, ) if assignee: - row[ - 0 - ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + row[0] = ( + f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}" + ) if x_axis == LABEL_ID: label = next( @@ -506,10 +506,8 @@ def analytic_export_task(email, data, slug): csv_buffer = generate_csv_from_rows(rows) send_export_email(email, slug, csv_buffer, rows) + logging.getLogger("plane").info("Email sent succesfully.") return except Exception as e: - print(e) - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/email_notification_task.py b/apiserver/plane/bgtasks/email_notification_task.py index 2a98c6b3324..682ccb4a31d 100644 --- a/apiserver/plane/bgtasks/email_notification_task.py +++ b/apiserver/plane/bgtasks/email_notification_task.py @@ -1,33 +1,37 @@ +import logging from 
datetime import datetime + from bs4 import BeautifulSoup # Third party imports from celery import shared_task -from sentry_sdk import capture_exception +from django.core.mail import EmailMultiAlternatives, get_connection +from django.template.loader import render_to_string # Django imports from django.utils import timezone -from django.core.mail import EmailMultiAlternatives, get_connection -from django.template.loader import render_to_string from django.utils.html import strip_tags -from django.conf import settings # Module imports -from plane.db.models import EmailNotificationLog, User, Issue +from plane.db.models import EmailNotificationLog, Issue, User from plane.license.utils.instance_value import get_email_configuration from plane.settings.redis import redis_instance +from plane.utils.exception_logger import log_exception + # acquire and delete redis lock def acquire_lock(lock_id, expire_time=300): redis_client = redis_instance() """Attempt to acquire a lock with a specified expiration time.""" - return redis_client.set(lock_id, 'true', nx=True, ex=expire_time) + return redis_client.set(lock_id, "true", nx=True, ex=expire_time) + def release_lock(lock_id): """Release a lock.""" redis_client = redis_instance() redis_client.delete(lock_id) + @shared_task def stack_email_notification(): # get all email notifications @@ -101,31 +105,31 @@ def create_payload(notification_data): # Append old_value if it's not empty and not already in the list if old_value: - data.setdefault(actor_id, {}).setdefault( - field, {} - ).setdefault("old_value", []).append( - old_value - ) if old_value not in data.setdefault( - actor_id, {} - ).setdefault( - field, {} - ).get( - "old_value", [] - ) else None + ( + data.setdefault(actor_id, {}) + .setdefault(field, {}) + .setdefault("old_value", []) + .append(old_value) + if old_value + not in data.setdefault(actor_id, {}) + .setdefault(field, {}) + .get("old_value", []) + else None + ) # Append new_value if it's not empty and not already 
in the list if new_value: - data.setdefault(actor_id, {}).setdefault( - field, {} - ).setdefault("new_value", []).append( - new_value - ) if new_value not in data.setdefault( - actor_id, {} - ).setdefault( - field, {} - ).get( - "new_value", [] - ) else None + ( + data.setdefault(actor_id, {}) + .setdefault(field, {}) + .setdefault("new_value", []) + .append(new_value) + if new_value + not in data.setdefault(actor_id, {}) + .setdefault(field, {}) + .get("new_value", []) + else None + ) if not data.get("actor_id", {}).get("activity_time", False): data[actor_id]["activity_time"] = str( @@ -136,22 +140,24 @@ def create_payload(notification_data): return data + def process_mention(mention_component): - soup = BeautifulSoup(mention_component, 'html.parser') - mentions = soup.find_all('mention-component') + soup = BeautifulSoup(mention_component, "html.parser") + mentions = soup.find_all("mention-component") for mention in mentions: - user_id = mention['id'] + user_id = mention["id"] user = User.objects.get(pk=user_id) user_name = user.display_name highlighted_name = f"@{user_name}" mention.replace_with(highlighted_name) return str(soup) + def process_html_content(content): processed_content_list = [] for html_content in content: processed_content = process_mention(html_content) - processed_content_list.append(processed_content) + processed_content_list.append(processed_content) return processed_content_list @@ -169,7 +175,7 @@ def send_email_notification( if acquire_lock(lock_id=lock_id): # get the redis instance ri = redis_instance() - base_api = (ri.get(str(issue_id)).decode()) + base_api = ri.get(str(issue_id)).decode() data = create_payload(notification_data=notification_data) # Get email configurations @@ -179,6 +185,7 @@ def send_email_notification( EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_TLS, + EMAIL_USE_SSL, EMAIL_FROM, ) = get_email_configuration() @@ -206,8 +213,12 @@ def send_email_notification( } ) if mention: - mention["new_value"] = 
process_html_content(mention.get("new_value")) - mention["old_value"] = process_html_content(mention.get("old_value")) + mention["new_value"] = process_html_content( + mention.get("new_value") + ) + mention["old_value"] = process_html_content( + mention.get("old_value") + ) comments.append( { "actor_comments": mention, @@ -220,7 +231,9 @@ def send_email_notification( ) activity_time = changes.pop("activity_time") # Parse the input string into a datetime object - formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p") + formatted_time = datetime.strptime( + activity_time, "%Y-%m-%d %H:%M:%S" + ).strftime("%H:%M %p") if changes: template_data.append( @@ -237,12 +250,14 @@ def send_email_notification( }, "activity_time": str(formatted_time), } - ) + ) summary = "Updates were made to the issue by" # Send the mail - subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}" + subject = ( + f"{issue.project.identifier}-{issue.sequence_id} {issue.name}" + ) context = { "data": template_data, "summary": summary, @@ -257,7 +272,7 @@ def send_email_notification( }, "issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}", "project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/", - "workspace":str(issue.project.workspace.slug), + "workspace": str(issue.project.workspace.slug), "project": str(issue.project.name), "user_preference": f"{base_api}/profile/preferences/email", "comments": comments, @@ -274,6 +289,7 @@ def send_email_notification( username=EMAIL_HOST_USER, password=EMAIL_HOST_PASSWORD, use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", ) msg = EmailMultiAlternatives( @@ -285,7 +301,9 @@ def send_email_notification( ) msg.attach_alternative(html_content, "text/html") msg.send() + logging.getLogger("plane").info("Email Sent Successfully") + # Update the logs EmailNotificationLog.objects.filter( 
pk__in=email_notification_ids ).update(sent_at=timezone.now()) @@ -294,15 +312,20 @@ def send_email_notification( release_lock(lock_id=lock_id) return except Exception as e: - capture_exception(e) + log_exception(e) # release the lock release_lock(lock_id=lock_id) return else: - print("Duplicate task recived. Skipping...") + logging.getLogger("plane").info( + "Duplicate email received skipping" + ) return except (Issue.DoesNotExist, User.DoesNotExist) as e: - if settings.DEBUG: - print(e) + log_exception(e) + release_lock(lock_id=lock_id) + return + except Exception as e: + log_exception(e) release_lock(lock_id=lock_id) return diff --git a/apiserver/plane/bgtasks/event_tracking_task.py b/apiserver/plane/bgtasks/event_tracking_task.py index 82a8281a95b..135ae1dd11f 100644 --- a/apiserver/plane/bgtasks/event_tracking_task.py +++ b/apiserver/plane/bgtasks/event_tracking_task.py @@ -1,13 +1,13 @@ -import uuid import os +import uuid # third party imports from celery import shared_task -from sentry_sdk import capture_exception from posthog import Posthog # module imports from plane.license.utils.instance_value import get_configuration_value +from plane.utils.exception_logger import log_exception def posthogConfiguration(): @@ -51,7 +51,8 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time): }, ) except Exception as e: - capture_exception(e) + log_exception(e) + return @shared_task @@ -77,4 +78,5 @@ def workspace_invite_event( }, ) except Exception as e: - capture_exception(e) + log_exception(e) + return diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py index d8522e7697f..c99836c8313 100644 --- a/apiserver/plane/bgtasks/export_task.py +++ b/apiserver/plane/bgtasks/export_task.py @@ -2,21 +2,22 @@ import csv import io import json -import boto3 import zipfile -# Django imports -from django.conf import settings -from django.utils import timezone +import boto3 +from botocore.client import Config # Third 
party imports from celery import shared_task -from sentry_sdk import capture_exception -from botocore.client import Config + +# Django imports +from django.conf import settings +from django.utils import timezone from openpyxl import Workbook # Module imports -from plane.db.models import Issue, ExporterHistory +from plane.db.models import ExporterHistory, Issue +from plane.utils.exception_logger import log_exception def dateTimeConverter(time): @@ -144,12 +145,17 @@ def generate_table_row(issue): issue["description_stripped"], issue["state__name"], issue["priority"], - f"{issue['created_by__first_name']} {issue['created_by__last_name']}" - if issue["created_by__first_name"] and issue["created_by__last_name"] - else "", - f"{issue['assignees__first_name']} {issue['assignees__last_name']}" - if issue["assignees__first_name"] and issue["assignees__last_name"] - else "", + ( + f"{issue['created_by__first_name']} {issue['created_by__last_name']}" + if issue["created_by__first_name"] + and issue["created_by__last_name"] + else "" + ), + ( + f"{issue['assignees__first_name']} {issue['assignees__last_name']}" + if issue["assignees__first_name"] and issue["assignees__last_name"] + else "" + ), issue["labels__name"], issue["issue_cycle__cycle__name"], dateConverter(issue["issue_cycle__cycle__start_date"]), @@ -172,12 +178,17 @@ def generate_json_row(issue): "Description": issue["description_stripped"], "State": issue["state__name"], "Priority": issue["priority"], - "Created By": f"{issue['created_by__first_name']} {issue['created_by__last_name']}" - if issue["created_by__first_name"] and issue["created_by__last_name"] - else "", - "Assignee": f"{issue['assignees__first_name']} {issue['assignees__last_name']}" - if issue["assignees__first_name"] and issue["assignees__last_name"] - else "", + "Created By": ( + f"{issue['created_by__first_name']} {issue['created_by__last_name']}" + if issue["created_by__first_name"] + and issue["created_by__last_name"] + else "" + ), + 
"Assignee": ( + f"{issue['assignees__first_name']} {issue['assignees__last_name']}" + if issue["assignees__first_name"] and issue["assignees__last_name"] + else "" + ), "Labels": issue["labels__name"], "Cycle Name": issue["issue_cycle__cycle__name"], "Cycle Start Date": dateConverter( @@ -292,7 +303,8 @@ def issue_export_task( workspace__id=workspace_id, project_id__in=project_ids, project__project_projectmember__member=exporter_instance.initiated_by_id, - project__project_projectmember__is_active=True + project__project_projectmember__is_active=True, + project__archived_at__isnull=True, ) .select_related( "project", "workspace", "state", "parent", "created_by" @@ -393,8 +405,5 @@ def issue_export_task( exporter_instance.status = "failed" exporter_instance.reason = str(e) exporter_instance.save(update_fields=["status", "reason"]) - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/forgot_password_task.py b/apiserver/plane/bgtasks/forgot_password_task.py index a2ac62927d0..b30c9311fef 100644 --- a/apiserver/plane/bgtasks/forgot_password_task.py +++ b/apiserver/plane/bgtasks/forgot_password_task.py @@ -1,28 +1,23 @@ -# Python import -import os -import requests -import json +# Python imports +import logging + +# Third party imports +from celery import shared_task # Django imports from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags -from django.conf import settings - -# Third party imports -from celery import shared_task -from sentry_sdk import capture_exception # Module imports from plane.license.utils.instance_value import get_email_configuration +from plane.utils.exception_logger import log_exception @shared_task def forgot_password(first_name, email, uidb64, token, current_site): try: - relative_link = ( - 
f"/accounts/reset-password/?uidb64={uidb64}&token={token}&email={email}" - ) + relative_link = f"/accounts/reset-password/?uidb64={uidb64}&token={token}&email={email}" abs_url = str(current_site) + relative_link ( @@ -31,6 +26,7 @@ def forgot_password(first_name, email, uidb64, token, current_site): EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_TLS, + EMAIL_USE_SSL, EMAIL_FROM, ) = get_email_configuration() @@ -54,6 +50,7 @@ def forgot_password(first_name, email, uidb64, token, current_site): username=EMAIL_HOST_USER, password=EMAIL_HOST_PASSWORD, use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", ) msg = EmailMultiAlternatives( @@ -65,10 +62,8 @@ def forgot_password(first_name, email, uidb64, token, current_site): ) msg.attach_alternative(html_content, "text/html") msg.send() + logging.getLogger("plane").info("Email sent successfully") return except Exception as e: - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/importer_task.py b/apiserver/plane/bgtasks/importer_task.py deleted file mode 100644 index 7a1dc4fc6d2..00000000000 --- a/apiserver/plane/bgtasks/importer_task.py +++ /dev/null @@ -1,201 +0,0 @@ -# Python imports -import json -import requests -import uuid - -# Django imports -from django.conf import settings -from django.core.serializers.json import DjangoJSONEncoder -from django.contrib.auth.hashers import make_password - -# Third Party imports -from celery import shared_task -from sentry_sdk import capture_exception - -# Module imports -from plane.app.serializers import ImporterSerializer -from plane.db.models import ( - Importer, - WorkspaceMember, - GithubRepositorySync, - GithubRepository, - ProjectMember, - WorkspaceIntegration, - Label, - User, - IssueProperty, - UserNotificationPreference, -) - - -@shared_task -def service_importer(service, importer_id): - try: - importer = Importer.objects.get(pk=importer_id) - importer.status = 
"processing" - importer.save() - - users = importer.data.get("users", []) - - # Check if we need to import users as well - if len(users): - # For all invited users create the users - new_users = User.objects.bulk_create( - [ - User( - email=user.get("email").strip().lower(), - username=uuid.uuid4().hex, - password=make_password(uuid.uuid4().hex), - is_password_autoset=True, - ) - for user in users - if user.get("import", False) == "invite" - ], - batch_size=100, - ignore_conflicts=True, - ) - - _ = UserNotificationPreference.objects.bulk_create( - [UserNotificationPreference(user=user) for user in new_users], - batch_size=100, - ) - - workspace_users = User.objects.filter( - email__in=[ - user.get("email").strip().lower() - for user in users - if user.get("import", False) == "invite" - or user.get("import", False) == "map" - ] - ) - - # Check if any of the users are already member of workspace - _ = WorkspaceMember.objects.filter( - member__in=[user for user in workspace_users], - workspace_id=importer.workspace_id, - ).update(is_active=True) - - # Add new users to Workspace and project automatically - WorkspaceMember.objects.bulk_create( - [ - WorkspaceMember( - member=user, - workspace_id=importer.workspace_id, - created_by=importer.created_by, - ) - for user in workspace_users - ], - batch_size=100, - ignore_conflicts=True, - ) - - ProjectMember.objects.bulk_create( - [ - ProjectMember( - project_id=importer.project_id, - workspace_id=importer.workspace_id, - member=user, - created_by=importer.created_by, - ) - for user in workspace_users - ], - batch_size=100, - ignore_conflicts=True, - ) - - IssueProperty.objects.bulk_create( - [ - IssueProperty( - project_id=importer.project_id, - workspace_id=importer.workspace_id, - user=user, - created_by=importer.created_by, - ) - for user in workspace_users - ], - batch_size=100, - ignore_conflicts=True, - ) - - # Check if sync config is on for github importers - if service == "github" and importer.config.get("sync", 
False): - name = importer.metadata.get("name", False) - url = importer.metadata.get("url", False) - config = importer.metadata.get("config", {}) - owner = importer.metadata.get("owner", False) - repository_id = importer.metadata.get("repository_id", False) - - workspace_integration = WorkspaceIntegration.objects.get( - workspace_id=importer.workspace_id, - integration__provider="github", - ) - - # Delete the old repository object - GithubRepositorySync.objects.filter( - project_id=importer.project_id - ).delete() - GithubRepository.objects.filter( - project_id=importer.project_id - ).delete() - - # Create a Label for github - label = Label.objects.filter( - name="GitHub", project_id=importer.project_id - ).first() - - if label is None: - label = Label.objects.create( - name="GitHub", - project_id=importer.project_id, - description="Label to sync Plane issues with GitHub issues", - color="#003773", - ) - # Create repository - repo = GithubRepository.objects.create( - name=name, - url=url, - config=config, - repository_id=repository_id, - owner=owner, - project_id=importer.project_id, - ) - - # Create repo sync - _ = GithubRepositorySync.objects.create( - repository=repo, - workspace_integration=workspace_integration, - actor=workspace_integration.actor, - credentials=importer.data.get("credentials", {}), - project_id=importer.project_id, - label=label, - ) - - # Add bot as a member in the project - _ = ProjectMember.objects.get_or_create( - member=workspace_integration.actor, - role=20, - project_id=importer.project_id, - ) - - if settings.PROXY_BASE_URL: - headers = {"Content-Type": "application/json"} - import_data_json = json.dumps( - ImporterSerializer(importer).data, - cls=DjangoJSONEncoder, - ) - _ = requests.post( - f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/", - json=import_data_json, - headers=headers, - ) - - return - except Exception as e: - importer = 
Importer.objects.get(pk=importer_id) - importer.status = "failed" - importer.save() - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) - capture_exception(e) - return diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index 2a16ee911a8..9a4e57a49f9 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -1,34 +1,36 @@ # Python imports import json + import requests +# Third Party imports +from celery import shared_task + # Django imports from django.conf import settings from django.core.serializers.json import DjangoJSONEncoder from django.utils import timezone -# Third Party imports -from celery import shared_task -from sentry_sdk import capture_exception +from plane.app.serializers import IssueActivitySerializer +from plane.bgtasks.notification_task import notifications # Module imports from plane.db.models import ( - User, + CommentReaction, + Cycle, Issue, - Project, - Label, IssueActivity, - State, - Cycle, - Module, - IssueReaction, - CommentReaction, IssueComment, + IssueReaction, IssueSubscriber, + Label, + Module, + Project, + State, + User, ) -from plane.app.serializers import IssueActivitySerializer -from plane.bgtasks.notification_task import notifications from plane.settings.redis import redis_instance +from plane.utils.exception_logger import log_exception # Track Changes in name @@ -53,7 +55,7 @@ def track_name( field="name", project_id=project_id, workspace_id=workspace_id, - comment=f"updated the name to", + comment="updated the name to", epoch=epoch, ) ) @@ -96,7 +98,7 @@ def track_description( field="description", project_id=project_id, workspace_id=workspace_id, - comment=f"updated the description to", + comment="updated the description to", epoch=epoch, ) ) @@ -130,22 +132,26 @@ def track_parent( issue_id=issue_id, actor_id=actor_id, verb="updated", - 
old_value=f"{old_parent.project.identifier}-{old_parent.sequence_id}" - if old_parent is not None - else "", - new_value=f"{new_parent.project.identifier}-{new_parent.sequence_id}" - if new_parent is not None - else "", + old_value=( + f"{old_parent.project.identifier}-{old_parent.sequence_id}" + if old_parent is not None + else "" + ), + new_value=( + f"{new_parent.project.identifier}-{new_parent.sequence_id}" + if new_parent is not None + else "" + ), field="parent", project_id=project_id, workspace_id=workspace_id, - comment=f"updated the parent issue to", - old_identifier=old_parent.id - if old_parent is not None - else None, - new_identifier=new_parent.id - if new_parent is not None - else None, + comment="updated the parent issue to", + old_identifier=( + old_parent.id if old_parent is not None else None + ), + new_identifier=( + new_parent.id if new_parent is not None else None + ), epoch=epoch, ) ) @@ -173,7 +179,7 @@ def track_priority( field="priority", project_id=project_id, workspace_id=workspace_id, - comment=f"updated the priority to", + comment="updated the priority to", epoch=epoch, ) ) @@ -206,7 +212,7 @@ def track_state( field="state", project_id=project_id, workspace_id=workspace_id, - comment=f"updated the state to", + comment="updated the state to", old_identifier=old_state.id, new_identifier=new_state.id, epoch=epoch, @@ -233,16 +239,20 @@ def track_target_date( issue_id=issue_id, actor_id=actor_id, verb="updated", - old_value=current_instance.get("target_date") - if current_instance.get("target_date") is not None - else "", - new_value=requested_data.get("target_date") - if requested_data.get("target_date") is not None - else "", + old_value=( + current_instance.get("target_date") + if current_instance.get("target_date") is not None + else "" + ), + new_value=( + requested_data.get("target_date") + if requested_data.get("target_date") is not None + else "" + ), field="target_date", project_id=project_id, workspace_id=workspace_id, - 
comment=f"updated the target date to", + comment="updated the target date to", epoch=epoch, ) ) @@ -265,16 +275,20 @@ def track_start_date( issue_id=issue_id, actor_id=actor_id, verb="updated", - old_value=current_instance.get("start_date") - if current_instance.get("start_date") is not None - else "", - new_value=requested_data.get("start_date") - if requested_data.get("start_date") is not None - else "", + old_value=( + current_instance.get("start_date") + if current_instance.get("start_date") is not None + else "" + ), + new_value=( + requested_data.get("start_date") + if requested_data.get("start_date") is not None + else "" + ), field="start_date", project_id=project_id, workspace_id=workspace_id, - comment=f"updated the start date to ", + comment="updated the start date to ", epoch=epoch, ) ) @@ -334,7 +348,7 @@ def track_labels( field="labels", project_id=project_id, workspace_id=workspace_id, - comment=f"removed label ", + comment="removed label ", old_identifier=label.id, new_identifier=None, epoch=epoch, @@ -364,7 +378,6 @@ def track_assignees( else set() ) - added_assignees = requested_assignees - current_assignees dropped_assginees = current_assignees - requested_assignees @@ -381,7 +394,7 @@ def track_assignees( field="assignees", project_id=project_id, workspace_id=workspace_id, - comment=f"added assignee ", + comment="added assignee ", new_identifier=assignee.id, epoch=epoch, ) @@ -414,7 +427,7 @@ def track_assignees( field="assignees", project_id=project_id, workspace_id=workspace_id, - comment=f"removed assignee ", + comment="removed assignee ", old_identifier=assignee.id, epoch=epoch, ) @@ -439,16 +452,20 @@ def track_estimate_points( issue_id=issue_id, actor_id=actor_id, verb="updated", - old_value=current_instance.get("estimate_point") - if current_instance.get("estimate_point") is not None - else "", - new_value=requested_data.get("estimate_point") - if requested_data.get("estimate_point") is not None - else "", + old_value=( + 
current_instance.get("estimate_point") + if current_instance.get("estimate_point") is not None + else "" + ), + new_value=( + requested_data.get("estimate_point") + if requested_data.get("estimate_point") is not None + else "" + ), field="estimate_point", project_id=project_id, workspace_id=workspace_id, - comment=f"updated the estimate point to ", + comment="updated the estimate point to ", epoch=epoch, ) ) @@ -529,7 +546,7 @@ def track_closed_to( field="state", project_id=project_id, workspace_id=workspace_id, - comment=f"Plane updated the state to ", + comment="Plane updated the state to ", old_identifier=None, new_identifier=updated_state.id, epoch=epoch, @@ -552,7 +569,7 @@ def create_issue_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"created the issue", + comment="created the issue", verb="created", actor_id=actor_id, epoch=epoch, @@ -635,7 +652,7 @@ def delete_issue_activity( IssueActivity( project_id=project_id, workspace_id=workspace_id, - comment=f"deleted the issue", + comment="deleted the issue", verb="deleted", actor_id=actor_id, field="issue", @@ -666,7 +683,7 @@ def create_comment_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"created a comment", + comment="created a comment", verb="created", actor_id=actor_id, field="comment", @@ -703,7 +720,7 @@ def update_comment_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"updated a comment", + comment="updated a comment", verb="updated", actor_id=actor_id, field="comment", @@ -732,7 +749,7 @@ def delete_comment_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"deleted the comment", + comment="deleted the comment", verb="deleted", actor_id=actor_id, field="comment", @@ -932,7 +949,11 @@ def delete_module_issue_activity( project_id=project_id, workspace_id=workspace_id, comment=f"removed this issue from {module_name}", - 
old_identifier=requested_data.get("module_id") if requested_data.get("module_id") is not None else None, + old_identifier=( + requested_data.get("module_id") + if requested_data.get("module_id") is not None + else None + ), epoch=epoch, ) ) @@ -960,7 +981,7 @@ def create_link_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"created a link", + comment="created a link", verb="created", actor_id=actor_id, field="link", @@ -994,7 +1015,7 @@ def update_link_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"updated a link", + comment="updated a link", verb="updated", actor_id=actor_id, field="link", @@ -1026,7 +1047,7 @@ def delete_link_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"deleted the link", + comment="deleted the link", verb="deleted", actor_id=actor_id, field="link", @@ -1059,7 +1080,7 @@ def create_attachment_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"created an attachment", + comment="created an attachment", verb="created", actor_id=actor_id, field="attachment", @@ -1085,7 +1106,7 @@ def delete_attachment_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"deleted the attachment", + comment="deleted the attachment", verb="deleted", actor_id=actor_id, field="attachment", @@ -1362,12 +1383,15 @@ def create_issue_relation_activity( verb="created", old_value="", new_value=f"{issue.project.identifier}-{issue.sequence_id}", - field="blocking" - if requested_data.get("relation_type") == "blocked_by" - else ( - "blocked_by" - if requested_data.get("relation_type") == "blocking" - else requested_data.get("relation_type") + field=( + "blocking" + if requested_data.get("relation_type") == "blocked_by" + else ( + "blocked_by" + if requested_data.get("relation_type") + == "blocking" + else requested_data.get("relation_type") + ) ), project_id=project_id, 
workspace_id=workspace_id, @@ -1418,12 +1442,14 @@ def delete_issue_relation_activity( verb="deleted", old_value=f"{issue.project.identifier}-{issue.sequence_id}", new_value="", - field="blocking" - if requested_data.get("relation_type") == "blocked_by" - else ( - "blocked_by" - if requested_data.get("relation_type") == "blocking" - else requested_data.get("relation_type") + field=( + "blocking" + if requested_data.get("relation_type") == "blocked_by" + else ( + "blocked_by" + if requested_data.get("relation_type") == "blocking" + else requested_data.get("relation_type") + ) ), project_id=project_id, workspace_id=workspace_id, @@ -1449,7 +1475,7 @@ def create_draft_issue_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"drafted the issue", + comment="drafted the issue", field="draft", verb="created", actor_id=actor_id, @@ -1476,14 +1502,14 @@ def update_draft_issue_activity( ) if ( requested_data.get("is_draft") is not None - and requested_data.get("is_draft") == False + and requested_data.get("is_draft") is False ): issue_activities.append( IssueActivity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"created the issue", + comment="created the issue", verb="updated", actor_id=actor_id, epoch=epoch, @@ -1495,7 +1521,7 @@ def update_draft_issue_activity( issue_id=issue_id, project_id=project_id, workspace_id=workspace_id, - comment=f"updated the draft issue", + comment="updated the draft issue", field="draft", verb="updated", actor_id=actor_id, @@ -1518,7 +1544,7 @@ def delete_draft_issue_activity( IssueActivity( project_id=project_id, workspace_id=workspace_id, - comment=f"deleted the draft issue", + comment="deleted the draft issue", field="draft", verb="deleted", actor_id=actor_id, @@ -1557,7 +1583,7 @@ def issue_activity( try: issue.updated_at = timezone.now() issue.save(update_fields=["updated_at"]) - except Exception as e: + except Exception: pass ACTIVITY_MAPPER = { @@ -1623,7 +1649,7 
@@ def issue_activity( headers=headers, ) except Exception as e: - capture_exception(e) + log_exception(e) if notification: notifications.delay( @@ -1644,8 +1670,5 @@ def issue_activity( return except Exception as e: - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/issue_automation_task.py b/apiserver/plane/bgtasks/issue_automation_task.py index c6c4d75158c..cdcdcd17408 100644 --- a/apiserver/plane/bgtasks/issue_automation_task.py +++ b/apiserver/plane/bgtasks/issue_automation_task.py @@ -2,18 +2,17 @@ import json from datetime import timedelta -# Django imports -from django.utils import timezone -from django.db.models import Q -from django.conf import settings - # Third party imports from celery import shared_task -from sentry_sdk import capture_exception +from django.db.models import Q + +# Django imports +from django.utils import timezone # Module imports -from plane.db.models import Issue, Project, State from plane.bgtasks.issue_activites_task import issue_activity +from plane.db.models import Issue, Project, State +from plane.utils.exception_logger import log_exception @shared_task @@ -79,7 +78,10 @@ def archive_old_issues(): issue_activity.delay( type="issue.activity.updated", requested_data=json.dumps( - {"archived_at": str(archive_at), "automation": True} + { + "archived_at": str(archive_at), + "automation": True, + } ), actor_id=str(project.created_by_id), issue_id=issue.id, @@ -93,9 +95,7 @@ def archive_old_issues(): ] return except Exception as e: - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return @@ -176,7 +176,5 @@ def close_old_issues(): ] return except Exception as e: - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/magic_link_code_task.py b/apiserver/plane/bgtasks/magic_link_code_task.py index b94ec4bfe23..4544e9889c1 100644 --- 
a/apiserver/plane/bgtasks/magic_link_code_task.py +++ b/apiserver/plane/bgtasks/magic_link_code_task.py @@ -1,20 +1,17 @@ # Python imports -import os -import requests -import json +import logging + +# Third party imports +from celery import shared_task # Django imports from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags -from django.conf import settings - -# Third party imports -from celery import shared_task -from sentry_sdk import capture_exception # Module imports from plane.license.utils.instance_value import get_email_configuration +from plane.utils.exception_logger import log_exception @shared_task @@ -26,6 +23,7 @@ def magic_link(email, key, token, current_site): EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_TLS, + EMAIL_USE_SSL, EMAIL_FROM, ) = get_email_configuration() @@ -44,6 +42,7 @@ def magic_link(email, key, token, current_site): username=EMAIL_HOST_USER, password=EMAIL_HOST_PASSWORD, use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", ) msg = EmailMultiAlternatives( @@ -55,11 +54,8 @@ def magic_link(email, key, token, current_site): ) msg.attach_alternative(html_content, "text/html") msg.send() + logging.getLogger("plane").info("Email sent successfully.") return except Exception as e: - print(e) - capture_exception(e) - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/notification_task.py b/apiserver/plane/bgtasks/notification_task.py index 0a843e4a63a..5725abc624b 100644 --- a/apiserver/plane/bgtasks/notification_task.py +++ b/apiserver/plane/bgtasks/notification_task.py @@ -40,7 +40,9 @@ def update_mentions_for_issue(issue, project, new_mentions, removed_mention): ) IssueMention.objects.bulk_create(aggregated_issue_mentions, batch_size=100) - IssueMention.objects.filter(issue=issue, mention__in=removed_mention).delete() + IssueMention.objects.filter( + 
issue=issue, mention__in=removed_mention + ).delete() def get_new_mentions(requested_instance, current_instance): @@ -92,7 +94,9 @@ def extract_mentions_as_subscribers(project_id, issue_id, mentions): project_id=project_id, ).exists() and not IssueAssignee.objects.filter( - project_id=project_id, issue_id=issue_id, assignee_id=mention_id + project_id=project_id, + issue_id=issue_id, + assignee_id=mention_id, ).exists() and not Issue.objects.filter( project_id=project_id, pk=issue_id, created_by_id=mention_id @@ -120,12 +124,14 @@ def extract_mentions(issue_instance): data = json.loads(issue_instance) html = data.get("description_html") soup = BeautifulSoup(html, "html.parser") - mention_tags = soup.find_all("mention-component", attrs={"target": "users"}) + mention_tags = soup.find_all( + "mention-component", attrs={"target": "users"} + ) mentions = [mention_tag["id"] for mention_tag in mention_tags] return list(set(mentions)) - except Exception as e: + except Exception: return [] @@ -134,11 +140,13 @@ def extract_comment_mentions(comment_value): try: mentions = [] soup = BeautifulSoup(comment_value, "html.parser") - mentions_tags = soup.find_all("mention-component", attrs={"target": "users"}) + mentions_tags = soup.find_all( + "mention-component", attrs={"target": "users"} + ) for mention_tag in mentions_tags: mentions.append(mention_tag["id"]) return list(set(mentions)) - except Exception as e: + except Exception: return [] @@ -157,7 +165,13 @@ def get_new_comment_mentions(new_value, old_value): def create_mention_notification( - project, notification_comment, issue, actor_id, mention_id, issue_id, activity + project, + notification_comment, + issue, + actor_id, + mention_id, + issue_id, + activity, ): return Notification( workspace=project.workspace, @@ -304,9 +318,11 @@ def notifications( # add the user to issue subscriber try: _ = IssueSubscriber.objects.get_or_create( - project_id=project_id, issue_id=issue_id, subscriber_id=actor_id + project_id=project_id, + 
issue_id=issue_id, + subscriber_id=actor_id, ) - except Exception as e: + except Exception: pass project = Project.objects.get(pk=project_id) @@ -334,11 +350,14 @@ def notifications( user_id=subscriber ) - for issue_activity in issue_activities_created: + for issue_activity in issue_activities_created: # If activity done in blocking then blocked by email should not go - if issue_activity.get("issue_detail").get("id") != issue_id: - continue; - + if ( + issue_activity.get("issue_detail").get("id") + != issue_id + ): + continue + # Do not send notification for description update if issue_activity.get("field") == "description": continue @@ -471,7 +490,9 @@ def notifications( if issue_comment is not None else "" ), - "activity_time": issue_activity.get("created_at"), + "activity_time": issue_activity.get( + "created_at" + ), }, }, ) @@ -552,7 +573,9 @@ def notifications( "old_value": str( issue_activity.get("old_value") ), - "activity_time": issue_activity.get("created_at"), + "activity_time": issue_activity.get( + "created_at" + ), }, }, ) @@ -640,7 +663,9 @@ def notifications( "old_value": str( last_activity.old_value ), - "activity_time": issue_activity.get("created_at"), + "activity_time": issue_activity.get( + "created_at" + ), }, }, ) @@ -697,7 +722,9 @@ def notifications( "old_value" ) ), - "activity_time": issue_activity.get("created_at"), + "activity_time": issue_activity.get( + "created_at" + ), }, }, ) diff --git a/apiserver/plane/bgtasks/project_invitation_task.py b/apiserver/plane/bgtasks/project_invitation_task.py index a986de33282..b60c49da170 100644 --- a/apiserver/plane/bgtasks/project_invitation_task.py +++ b/apiserver/plane/bgtasks/project_invitation_task.py @@ -1,19 +1,18 @@ -# Python import -import os +# Python imports +import logging + +# Third party imports +from celery import shared_task # Django imports from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html 
import strip_tags -from django.conf import settings - -# Third party imports -from celery import shared_task -from sentry_sdk import capture_exception # Module imports -from plane.db.models import Project, User, ProjectMemberInvite +from plane.db.models import Project, ProjectMemberInvite, User from plane.license.utils.instance_value import get_email_configuration +from plane.utils.exception_logger import log_exception @shared_task @@ -53,6 +52,7 @@ def project_invitation(email, project_id, token, current_site, invitor): EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_TLS, + EMAIL_USE_SSL, EMAIL_FROM, ) = get_email_configuration() @@ -62,6 +62,7 @@ def project_invitation(email, project_id, token, current_site, invitor): username=EMAIL_HOST_USER, password=EMAIL_HOST_PASSWORD, use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", ) msg = EmailMultiAlternatives( @@ -74,12 +75,10 @@ def project_invitation(email, project_id, token, current_site, invitor): msg.attach_alternative(html_content, "text/html") msg.send() + logging.getLogger("plane").info("Email sent successfully.") return - except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist) as e: + except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist): return except Exception as e: - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/webhook_task.py b/apiserver/plane/bgtasks/webhook_task.py index 605f48dd944..6537e3b21f6 100644 --- a/apiserver/plane/bgtasks/webhook_task.py +++ b/apiserver/plane/bgtasks/webhook_task.py @@ -1,44 +1,45 @@ -import requests -import uuid import hashlib -import json import hmac +import json +import logging +import uuid + +import requests + +# Third party imports +from celery import shared_task # Django imports from django.conf import settings -from django.core.serializers.json import DjangoJSONEncoder from django.core.mail import EmailMultiAlternatives, get_connection +from 
django.core.serializers.json import DjangoJSONEncoder from django.template.loader import render_to_string from django.utils.html import strip_tags -# Third party imports -from celery import shared_task -from sentry_sdk import capture_exception - +# Module imports +from plane.api.serializers import ( + CycleIssueSerializer, + CycleSerializer, + IssueCommentSerializer, + IssueExpandSerializer, + ModuleIssueSerializer, + ModuleSerializer, + ProjectSerializer, +) from plane.db.models import ( - Webhook, - WebhookLog, - Project, - Issue, Cycle, - Module, - ModuleIssue, CycleIssue, + Issue, IssueComment, + Module, + ModuleIssue, + Project, User, + Webhook, + WebhookLog, ) -from plane.api.serializers import ( - ProjectSerializer, - CycleSerializer, - ModuleSerializer, - CycleIssueSerializer, - ModuleIssueSerializer, - IssueCommentSerializer, - IssueExpandSerializer, -) - -# Module imports from plane.license.utils.instance_value import get_email_configuration +from plane.utils.exception_logger import log_exception SERIALIZER_MAPPER = { "project": ProjectSerializer, @@ -159,7 +160,7 @@ def webhook_task(self, webhook, slug, event, event_data, action, current_site): ) # Retry logic if self.request.retries >= self.max_retries: - Webhook.objects.filter(pk=webhook.id).update(is_active=False) + Webhook.objects.filter(pk=webhook.id).update(is_active=False) if webhook: # send email for the deactivation of the webhook send_webhook_deactivation_email( @@ -174,7 +175,7 @@ def webhook_task(self, webhook, slug, event, event_data, action, current_site): except Exception as e: if settings.DEBUG: print(e) - capture_exception(e) + log_exception(e) return @@ -215,9 +216,11 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site): event_data = [ get_model_data( event=event, - event_id=payload.get("id") - if isinstance(payload, dict) - else None, + event_id=( + payload.get("id") + if isinstance(payload, dict) + else None + ), many=False, ) ] @@ -239,12 +242,14 @@ def 
send_webhook(event, payload, kw, action, slug, bulk, current_site): except Exception as e: if settings.DEBUG: print(e) - capture_exception(e) + log_exception(e) return @shared_task -def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reason): +def send_webhook_deactivation_email( + webhook_id, receiver_id, current_site, reason +): # Get email configurations ( EMAIL_HOST, @@ -252,19 +257,22 @@ def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reaso EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_TLS, + EMAIL_USE_SSL, EMAIL_FROM, ) = get_email_configuration() receiver = User.objects.get(pk=receiver_id) - webhook = Webhook.objects.get(pk=webhook_id) - subject="Webhook Deactivated" - message=f"Webhook {webhook.url} has been deactivated due to failed requests." + webhook = Webhook.objects.get(pk=webhook_id) + subject = "Webhook Deactivated" + message = ( + f"Webhook {webhook.url} has been deactivated due to failed requests." + ) # Send the mail context = { "email": receiver.email, "message": message, - "webhook_url":f"{current_site}/{str(webhook.workspace.slug)}/settings/webhooks/{str(webhook.id)}", + "webhook_url": f"{current_site}/{str(webhook.workspace.slug)}/settings/webhooks/{str(webhook.id)}", } html_content = render_to_string( "emails/notifications/webhook-deactivate.html", context @@ -278,6 +286,7 @@ def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reaso username=EMAIL_HOST_USER, password=EMAIL_HOST_PASSWORD, use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", ) msg = EmailMultiAlternatives( @@ -289,8 +298,8 @@ def send_webhook_deactivation_email(webhook_id, receiver_id, current_site, reaso ) msg.attach_alternative(html_content, "text/html") msg.send() - + logging.getLogger("plane").info("Email sent successfully.") return except Exception as e: - print(e) + log_exception(e) return diff --git a/apiserver/plane/bgtasks/workspace_invitation_task.py 
b/apiserver/plane/bgtasks/workspace_invitation_task.py index 06dd6e8cd56..c0b945e62b4 100644 --- a/apiserver/plane/bgtasks/workspace_invitation_task.py +++ b/apiserver/plane/bgtasks/workspace_invitation_task.py @@ -1,23 +1,18 @@ # Python imports -import os -import requests -import json +import logging + +# Third party imports +from celery import shared_task # Django imports from django.core.mail import EmailMultiAlternatives, get_connection from django.template.loader import render_to_string from django.utils.html import strip_tags -from django.conf import settings - -# Third party imports -from celery import shared_task -from sentry_sdk import capture_exception -from slack_sdk import WebClient -from slack_sdk.errors import SlackApiError # Module imports -from plane.db.models import Workspace, WorkspaceMemberInvite, User +from plane.db.models import User, Workspace, WorkspaceMemberInvite from plane.license.utils.instance_value import get_email_configuration +from plane.utils.exception_logger import log_exception @shared_task @@ -42,6 +37,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor): EMAIL_HOST_PASSWORD, EMAIL_PORT, EMAIL_USE_TLS, + EMAIL_USE_SSL, EMAIL_FROM, ) = get_email_configuration() @@ -70,6 +66,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor): username=EMAIL_HOST_USER, password=EMAIL_HOST_PASSWORD, use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", ) msg = EmailMultiAlternatives( @@ -81,14 +78,12 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor): ) msg.attach_alternative(html_content, "text/html") msg.send() + logging.getLogger("plane").info("Email sent succesfully") return except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e: - print("Workspace or WorkspaceMember Invite Does not exists") + log_exception(e) return except Exception as e: - # Print logs if in DEBUG mode - if settings.DEBUG: - print(e) - capture_exception(e) + 
log_exception(e) return diff --git a/apiserver/plane/celery.py b/apiserver/plane/celery.py index 0912e276af4..056dfb16bc5 100644 --- a/apiserver/plane/celery.py +++ b/apiserver/plane/celery.py @@ -2,7 +2,6 @@ from celery import Celery from plane.settings.redis import redis_instance from celery.schedules import crontab -from django.utils.timezone import timedelta # Set the default Django settings module for the 'celery' program. os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production") @@ -31,7 +30,7 @@ }, "check-every-five-minutes-to-send-email-notifications": { "task": "plane.bgtasks.email_notification_task.stack_email_notification", - "schedule": crontab(minute='*/5') + "schedule": crontab(minute="*/5"), }, } diff --git a/apiserver/plane/db/management/commands/clear_cache.py b/apiserver/plane/db/management/commands/clear_cache.py new file mode 100644 index 00000000000..4dfbe6c1032 --- /dev/null +++ b/apiserver/plane/db/management/commands/clear_cache.py @@ -0,0 +1,17 @@ +# Django imports +from django.core.cache import cache +from django.core.management import BaseCommand + + +class Command(BaseCommand): + help = "Clear Cache before starting the server to remove stale values" + + def handle(self, *args, **options): + try: + cache.clear() + self.stdout.write(self.style.SUCCESS("Cache Cleared")) + return + except Exception: + # Another ClientError occurred + self.stdout.write(self.style.ERROR("Failed to clear cache")) + return diff --git a/apiserver/plane/db/management/commands/reset_password.py b/apiserver/plane/db/management/commands/reset_password.py index d48c24b1cdb..bca6c356015 100644 --- a/apiserver/plane/db/management/commands/reset_password.py +++ b/apiserver/plane/db/management/commands/reset_password.py @@ -52,5 +52,5 @@ def handle(self, *args, **options): user.save() self.stdout.write( - self.style.SUCCESS(f"User password updated succesfully") + self.style.SUCCESS("User password updated succesfully") ) diff --git 
a/apiserver/plane/db/management/commands/test_email.py b/apiserver/plane/db/management/commands/test_email.py new file mode 100644 index 00000000000..99c5d9684f4 --- /dev/null +++ b/apiserver/plane/db/management/commands/test_email.py @@ -0,0 +1,63 @@ +from django.core.mail import EmailMultiAlternatives, get_connection +from django.core.management import BaseCommand, CommandError + +from plane.license.utils.instance_value import get_email_configuration + + +class Command(BaseCommand): + """Django command to pause execution until db is available""" + + def add_arguments(self, parser): + # Positional argument + parser.add_argument("to_email", type=str, help="receiver's email") + + def handle(self, *args, **options): + receiver_email = options.get("to_email") + + if not receiver_email: + raise CommandError("Reciever email is required") + + ( + EMAIL_HOST, + EMAIL_HOST_USER, + EMAIL_HOST_PASSWORD, + EMAIL_PORT, + EMAIL_USE_TLS, + EMAIL_USE_SSL, + EMAIL_FROM, + ) = get_email_configuration() + + connection = get_connection( + host=EMAIL_HOST, + port=int(EMAIL_PORT), + username=EMAIL_HOST_USER, + password=EMAIL_HOST_PASSWORD, + use_tls=EMAIL_USE_TLS == "1", + use_ssl=EMAIL_USE_SSL == "1", + timeout=30, + ) + # Prepare email details + subject = "Email Notification from Plane" + message = ( + "This is a sample email notification sent from Plane application." 
+ ) + + self.stdout.write(self.style.SUCCESS("Trying to send test email...")) + + # Send the email + try: + msg = EmailMultiAlternatives( + subject=subject, + body=message, + from_email=EMAIL_FROM, + to=[receiver_email], + connection=connection, + ) + msg.send() + self.stdout.write(self.style.SUCCESS("Email succesfully sent")) + except Exception as e: + self.stdout.write( + self.style.ERROR( + f"Error: Email could not be delivered due to {e}" + ) + ) diff --git a/apiserver/plane/db/management/commands/wait_for_migrations.py b/apiserver/plane/db/management/commands/wait_for_migrations.py index 51f2cf33930..91c8a4ce8c7 100644 --- a/apiserver/plane/db/management/commands/wait_for_migrations.py +++ b/apiserver/plane/db/management/commands/wait_for_migrations.py @@ -4,15 +4,18 @@ from django.db.migrations.executor import MigrationExecutor from django.db import connections, DEFAULT_DB_ALIAS + class Command(BaseCommand): - help = 'Wait for database migrations to complete before starting Celery worker/beat' + help = "Wait for database migrations to complete before starting Celery worker/beat" def handle(self, *args, **kwargs): while self._pending_migrations(): self.stdout.write("Waiting for database migrations to complete...") time.sleep(10) # wait for 10 seconds before checking again - self.stdout.write(self.style.SUCCESS("No migrations Pending. Starting processes ...")) + self.stdout.write( + self.style.SUCCESS("No migrations Pending. 
Starting processes ...") + ) def _pending_migrations(self): connection = connections[DEFAULT_DB_ALIAS] diff --git a/apiserver/plane/db/migrations/0038_auto_20230720_1505.py b/apiserver/plane/db/migrations/0038_auto_20230720_1505.py index 53e50ed41e3..5f11d9adea9 100644 --- a/apiserver/plane/db/migrations/0038_auto_20230720_1505.py +++ b/apiserver/plane/db/migrations/0038_auto_20230720_1505.py @@ -1,6 +1,6 @@ # Generated by Django 4.2.3 on 2023-07-20 09:35 -from django.db import migrations, models +from django.db import migrations def restructure_theming(apps, schema_editor): diff --git a/apiserver/plane/db/migrations/0054_dashboard_widget_dashboardwidget.py b/apiserver/plane/db/migrations/0054_dashboard_widget_dashboardwidget.py index 933c229a159..b3b5cc8c192 100644 --- a/apiserver/plane/db/migrations/0054_dashboard_widget_dashboardwidget.py +++ b/apiserver/plane/db/migrations/0054_dashboard_widget_dashboardwidget.py @@ -7,71 +7,204 @@ class Migration(migrations.Migration): - dependencies = [ - ('db', '0053_auto_20240102_1315'), + ("db", "0053_auto_20240102_1315"), ] operations = [ migrations.CreateModel( - name='Dashboard', + name="Dashboard", fields=[ - ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), - ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), - ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), - ('name', models.CharField(max_length=255)), - ('description_html', models.TextField(blank=True, default='

')), - ('identifier', models.UUIDField(null=True)), - ('is_default', models.BooleanField(default=False)), - ('type_identifier', models.CharField(choices=[('workspace', 'Workspace'), ('project', 'Project'), ('home', 'Home'), ('team', 'Team'), ('user', 'User')], default='home', max_length=30, verbose_name='Dashboard Type')), - ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), - ('owned_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dashboards', to=settings.AUTH_USER_MODEL)), - ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ( + "created_at", + models.DateTimeField( + auto_now_add=True, verbose_name="Created At" + ), + ), + ( + "updated_at", + models.DateTimeField( + auto_now=True, verbose_name="Last Modified At" + ), + ), + ( + "id", + models.UUIDField( + db_index=True, + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + unique=True, + ), + ), + ("name", models.CharField(max_length=255)), + ( + "description_html", + models.TextField(blank=True, default="

"), + ), + ("identifier", models.UUIDField(null=True)), + ("is_default", models.BooleanField(default=False)), + ( + "type_identifier", + models.CharField( + choices=[ + ("workspace", "Workspace"), + ("project", "Project"), + ("home", "Home"), + ("team", "Team"), + ("user", "User"), + ], + default="home", + max_length=30, + verbose_name="Dashboard Type", + ), + ), + ( + "created_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_created_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Created By", + ), + ), + ( + "owned_by", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="dashboards", + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "updated_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_updated_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Last Modified By", + ), + ), ], options={ - 'verbose_name': 'Dashboard', - 'verbose_name_plural': 'Dashboards', - 'db_table': 'dashboards', - 'ordering': ('-created_at',), + "verbose_name": "Dashboard", + "verbose_name_plural": "Dashboards", + "db_table": "dashboards", + "ordering": ("-created_at",), }, ), migrations.CreateModel( - name='Widget', + name="Widget", fields=[ - ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), - ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), - ('key', models.CharField(max_length=255)), - ('filters', models.JSONField(default=dict)), + ( + "id", + models.UUIDField( + db_index=True, + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + unique=True, + ), + ), + ( + "created_at", + models.DateTimeField( + auto_now_add=True, verbose_name="Created At" + ), + ), + ( + "updated_at", + models.DateTimeField( + 
auto_now=True, verbose_name="Last Modified At" + ), + ), + ("key", models.CharField(max_length=255)), + ("filters", models.JSONField(default=dict)), ], options={ - 'verbose_name': 'Widget', - 'verbose_name_plural': 'Widgets', - 'db_table': 'widgets', - 'ordering': ('-created_at',), + "verbose_name": "Widget", + "verbose_name_plural": "Widgets", + "db_table": "widgets", + "ordering": ("-created_at",), }, ), migrations.CreateModel( - name='DashboardWidget', + name="DashboardWidget", fields=[ - ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), - ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), - ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), - ('is_visible', models.BooleanField(default=True)), - ('sort_order', models.FloatField(default=65535)), - ('filters', models.JSONField(default=dict)), - ('properties', models.JSONField(default=dict)), - ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), - ('dashboard', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dashboard_widgets', to='db.dashboard')), - ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), - ('widget', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dashboard_widgets', to='db.widget')), + ( + "created_at", + models.DateTimeField( + auto_now_add=True, verbose_name="Created At" + ), + ), + ( + "updated_at", + models.DateTimeField( + auto_now=True, verbose_name="Last Modified At" + ), + ), + ( + "id", + models.UUIDField( + db_index=True, + default=uuid.uuid4, + editable=False, + primary_key=True, + serialize=False, + unique=True, + ), + ), + 
("is_visible", models.BooleanField(default=True)), + ("sort_order", models.FloatField(default=65535)), + ("filters", models.JSONField(default=dict)), + ("properties", models.JSONField(default=dict)), + ( + "created_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_created_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Created By", + ), + ), + ( + "dashboard", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="dashboard_widgets", + to="db.dashboard", + ), + ), + ( + "updated_by", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.SET_NULL, + related_name="%(class)s_updated_by", + to=settings.AUTH_USER_MODEL, + verbose_name="Last Modified By", + ), + ), + ( + "widget", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="dashboard_widgets", + to="db.widget", + ), + ), ], options={ - 'verbose_name': 'Dashboard Widget', - 'verbose_name_plural': 'Dashboard Widgets', - 'db_table': 'dashboard_widgets', - 'ordering': ('-created_at',), - 'unique_together': {('widget', 'dashboard')}, + "verbose_name": "Dashboard Widget", + "verbose_name_plural": "Dashboard Widgets", + "db_table": "dashboard_widgets", + "ordering": ("-created_at",), + "unique_together": {("widget", "dashboard")}, }, ), ] diff --git a/apiserver/plane/db/migrations/0055_auto_20240108_0648.py b/apiserver/plane/db/migrations/0055_auto_20240108_0648.py index e369c185d6b..b13fcdea150 100644 --- a/apiserver/plane/db/migrations/0055_auto_20240108_0648.py +++ b/apiserver/plane/db/migrations/0055_auto_20240108_0648.py @@ -62,7 +62,7 @@ def create_dashboards(apps, schema_editor): type_identifier="home", is_default=True, ) - for user_id in User.objects.values_list('id', flat=True) + for user_id in User.objects.values_list("id", flat=True) ], batch_size=2000, ) @@ -78,11 +78,13 @@ def create_dashboard_widgets(apps, schema_editor): widget_id=widget_id, 
dashboard_id=dashboard_id, ) - for widget_id in Widget.objects.values_list('id', flat=True) - for dashboard_id in Dashboard.objects.values_list('id', flat=True) + for widget_id in Widget.objects.values_list("id", flat=True) + for dashboard_id in Dashboard.objects.values_list("id", flat=True) ] - DashboardWidget.objects.bulk_create(updated_dashboard_widget, batch_size=2000) + DashboardWidget.objects.bulk_create( + updated_dashboard_widget, batch_size=2000 + ) class Migration(migrations.Migration): diff --git a/apiserver/plane/db/migrations/0057_auto_20240122_0901.py b/apiserver/plane/db/migrations/0057_auto_20240122_0901.py index 9204d43b3f5..a143917d235 100644 --- a/apiserver/plane/db/migrations/0057_auto_20240122_0901.py +++ b/apiserver/plane/db/migrations/0057_auto_20240122_0901.py @@ -2,12 +2,17 @@ from django.db import migrations + def create_notification_preferences(apps, schema_editor): - UserNotificationPreference = apps.get_model("db", "UserNotificationPreference") + UserNotificationPreference = apps.get_model( + "db", "UserNotificationPreference" + ) User = apps.get_model("db", "User") bulk_notification_preferences = [] - for user_id in User.objects.filter(is_bot=False).values_list("id", flat=True): + for user_id in User.objects.filter(is_bot=False).values_list( + "id", flat=True + ): bulk_notification_preferences.append( UserNotificationPreference( user_id=user_id, @@ -18,11 +23,10 @@ def create_notification_preferences(apps, schema_editor): bulk_notification_preferences, batch_size=1000, ignore_conflicts=True ) + class Migration(migrations.Migration): dependencies = [ ("db", "0056_usernotificationpreference_emailnotificationlog"), ] - operations = [ - migrations.RunPython(create_notification_preferences) - ] + operations = [migrations.RunPython(create_notification_preferences)] diff --git a/apiserver/plane/db/migrations/0058_alter_moduleissue_issue_and_more.py b/apiserver/plane/db/migrations/0058_alter_moduleissue_issue_and_more.py index 
6238ef8257c..411cd47bd44 100644 --- a/apiserver/plane/db/migrations/0058_alter_moduleissue_issue_and_more.py +++ b/apiserver/plane/db/migrations/0058_alter_moduleissue_issue_and_more.py @@ -5,19 +5,22 @@ class Migration(migrations.Migration): - dependencies = [ - ('db', '0057_auto_20240122_0901'), + ("db", "0057_auto_20240122_0901"), ] operations = [ migrations.AlterField( - model_name='moduleissue', - name='issue', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_module', to='db.issue'), + model_name="moduleissue", + name="issue", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="issue_module", + to="db.issue", + ), ), migrations.AlterUniqueTogether( - name='moduleissue', - unique_together={('issue', 'module')}, + name="moduleissue", + unique_together={("issue", "module")}, ), ] diff --git a/apiserver/plane/db/migrations/0059_auto_20240208_0957.py b/apiserver/plane/db/migrations/0059_auto_20240208_0957.py index c4c43fa4bf6..30d816a9399 100644 --- a/apiserver/plane/db/migrations/0059_auto_20240208_0957.py +++ b/apiserver/plane/db/migrations/0059_auto_20240208_0957.py @@ -24,10 +24,9 @@ def widgets_filter_change(apps, schema_editor): # Bulk update the widgets Widget.objects.bulk_update(widgets_to_update, ["filters"], batch_size=10) + class Migration(migrations.Migration): dependencies = [ - ('db', '0058_alter_moduleissue_issue_and_more'), - ] - operations = [ - migrations.RunPython(widgets_filter_change) + ("db", "0058_alter_moduleissue_issue_and_more"), ] + operations = [migrations.RunPython(widgets_filter_change)] diff --git a/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py b/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py index 074e20a16b8..575836a3517 100644 --- a/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py +++ b/apiserver/plane/db/migrations/0060_cycle_progress_snapshot.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - 
dependencies = [ - ('db', '0059_auto_20240208_0957'), + ("db", "0059_auto_20240208_0957"), ] operations = [ migrations.AddField( - model_name='cycle', - name='progress_snapshot', + model_name="cycle", + name="progress_snapshot", field=models.JSONField(default=dict), ), ] diff --git a/apiserver/plane/db/migrations/0061_project_logo_props.py b/apiserver/plane/db/migrations/0061_project_logo_props.py new file mode 100644 index 00000000000..d8752d9dd8f --- /dev/null +++ b/apiserver/plane/db/migrations/0061_project_logo_props.py @@ -0,0 +1,54 @@ +# Generated by Django 4.2.7 on 2024-03-03 16:25 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + def update_project_logo_props(apps, schema_editor): + Project = apps.get_model("db", "Project") + + bulk_update_project_logo = [] + # Iterate through projects and update logo_props + for project in Project.objects.all(): + project.logo_props["in_use"] = "emoji" if project.emoji else "icon" + project.logo_props["emoji"] = { + "value": project.emoji if project.emoji else "", + "url": "", + } + project.logo_props["icon"] = { + "name": ( + project.icon_prop.get("name", "") + if project.icon_prop + else "" + ), + "color": ( + project.icon_prop.get("color", "") + if project.icon_prop + else "" + ), + } + bulk_update_project_logo.append(project) + + # Bulk update logo_props for all projects + Project.objects.bulk_update( + bulk_update_project_logo, ["logo_props"], batch_size=1000 + ) + + dependencies = [ + ("db", "0060_cycle_progress_snapshot"), + ] + + operations = [ + migrations.AlterField( + model_name="issuelink", + name="url", + field=models.TextField(), + ), + migrations.AddField( + model_name="project", + name="logo_props", + field=models.JSONField(default=dict), + ), + migrations.RunPython(update_project_logo_props), + ] diff --git a/apiserver/plane/db/migrations/0062_cycle_archived_at_module_archived_at_and_more.py 
b/apiserver/plane/db/migrations/0062_cycle_archived_at_module_archived_at_and_more.py new file mode 100644 index 00000000000..be3f9fc2a12 --- /dev/null +++ b/apiserver/plane/db/migrations/0062_cycle_archived_at_module_archived_at_and_more.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.7 on 2024-03-19 08:28 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0061_project_logo_props'), + ] + + operations = [ + migrations.AddField( + model_name="cycle", + name="archived_at", + field=models.DateTimeField(null=True), + ), + migrations.AddField( + model_name="module", + name="archived_at", + field=models.DateTimeField(null=True), + ), + migrations.AddField( + model_name="project", + name="archived_at", + field=models.DateTimeField(null=True), + ), + migrations.AlterField( + model_name="socialloginconnection", + name="medium", + field=models.CharField( + choices=[ + ("Google", "google"), + ("Github", "github"), + ("Jira", "jira"), + ], + default=None, + max_length=20, + ), + ), + ] diff --git a/apiserver/plane/db/mixins.py b/apiserver/plane/db/mixins.py index 263f9ab9a5f..f1756e5adca 100644 --- a/apiserver/plane/db/mixins.py +++ b/apiserver/plane/db/mixins.py @@ -1,12 +1,10 @@ # Python imports -import uuid # Django imports from django.db import models class TimeAuditModel(models.Model): - """To path when the record was created and last modified""" created_at = models.DateTimeField( @@ -22,7 +20,6 @@ class Meta: class UserAuditModel(models.Model): - """To path when the record was created and last modified""" created_by = models.ForeignKey( @@ -45,7 +42,6 @@ class Meta: class AuditModel(TimeAuditModel, UserAuditModel): - """To path when the record was created and last modified""" class Meta: diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index d9096bd01f0..daa793c37a7 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ 
-85,10 +85,14 @@ from .analytic import AnalyticView -from .notification import Notification, UserNotificationPreference, EmailNotificationLog +from .notification import ( + Notification, + UserNotificationPreference, + EmailNotificationLog, +) from .exporter import ExporterHistory from .webhook import Webhook, WebhookLog -from .dashboard import Dashboard, DashboardWidget, Widget \ No newline at end of file +from .dashboard import Dashboard, DashboardWidget, Widget diff --git a/apiserver/plane/db/models/analytic.py b/apiserver/plane/db/models/analytic.py index d097051afb9..68747e8c491 100644 --- a/apiserver/plane/db/models/analytic.py +++ b/apiserver/plane/db/models/analytic.py @@ -1,6 +1,5 @@ # Django models from django.db import models -from django.conf import settings from .base import BaseModel diff --git a/apiserver/plane/db/models/cycle.py b/apiserver/plane/db/models/cycle.py index d802dbc1e09..15a8251d767 100644 --- a/apiserver/plane/db/models/cycle.py +++ b/apiserver/plane/db/models/cycle.py @@ -69,6 +69,7 @@ class Cycle(ProjectBaseModel): external_source = models.CharField(max_length=255, null=True, blank=True) external_id = models.CharField(max_length=255, blank=True, null=True) progress_snapshot = models.JSONField(default=dict) + archived_at = models.DateTimeField(null=True) class Meta: verbose_name = "Cycle" diff --git a/apiserver/plane/db/models/dashboard.py b/apiserver/plane/db/models/dashboard.py index 05c5a893f4e..d07a7072857 100644 --- a/apiserver/plane/db/models/dashboard.py +++ b/apiserver/plane/db/models/dashboard.py @@ -2,12 +2,12 @@ # Django imports from django.db import models -from django.conf import settings # Module imports from . 
import BaseModel from ..mixins import TimeAuditModel + class Dashboard(BaseModel): DASHBOARD_CHOICES = ( ("workspace", "Workspace"), @@ -45,7 +45,11 @@ class Meta: class Widget(TimeAuditModel): id = models.UUIDField( - default=uuid.uuid4, unique=True, editable=False, db_index=True, primary_key=True + default=uuid.uuid4, + unique=True, + editable=False, + db_index=True, + primary_key=True, ) key = models.CharField(max_length=255) filters = models.JSONField(default=dict) diff --git a/apiserver/plane/db/models/integration/github.py b/apiserver/plane/db/models/integration/github.py index f3331c87451..6a00dc6900f 100644 --- a/apiserver/plane/db/models/integration/github.py +++ b/apiserver/plane/db/models/integration/github.py @@ -1,5 +1,4 @@ # Python imports -import uuid # Django imports from django.db import models diff --git a/apiserver/plane/db/models/integration/slack.py b/apiserver/plane/db/models/integration/slack.py index 72df4dfd737..1f07179b7a8 100644 --- a/apiserver/plane/db/models/integration/slack.py +++ b/apiserver/plane/db/models/integration/slack.py @@ -1,5 +1,4 @@ # Python imports -import uuid # Django imports from django.db import models diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index d5ed4247a74..0a59acb9361 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -91,6 +91,7 @@ def get_queryset(self): | models.Q(issue_inbox__isnull=True) ) .exclude(archived_at__isnull=False) + .exclude(project__archived_at__isnull=False) .exclude(is_draft=True) ) @@ -320,7 +321,7 @@ def __str__(self): class IssueLink(ProjectBaseModel): title = models.CharField(max_length=255, null=True, blank=True) - url = models.URLField() + url = models.TextField() issue = models.ForeignKey( "db.Issue", on_delete=models.CASCADE, related_name="issue_link" ) diff --git a/apiserver/plane/db/models/module.py b/apiserver/plane/db/models/module.py index 9af4e120e74..b201e4d7f20 100644 --- 
a/apiserver/plane/db/models/module.py +++ b/apiserver/plane/db/models/module.py @@ -92,6 +92,7 @@ class Module(ProjectBaseModel): sort_order = models.FloatField(default=65535) external_source = models.CharField(max_length=255, null=True, blank=True) external_id = models.CharField(max_length=255, blank=True, null=True) + archived_at = models.DateTimeField(null=True) class Meta: unique_together = ["name", "project"] diff --git a/apiserver/plane/db/models/notification.py b/apiserver/plane/db/models/notification.py index b42ae54a927..9138ece9fae 100644 --- a/apiserver/plane/db/models/notification.py +++ b/apiserver/plane/db/models/notification.py @@ -5,6 +5,7 @@ # Module imports from . import BaseModel + class Notification(BaseModel): workspace = models.ForeignKey( "db.Workspace", related_name="notifications", on_delete=models.CASCADE @@ -105,10 +106,19 @@ def __str__(self): """Return the user""" return f"<{self.user}>" + class EmailNotificationLog(BaseModel): # receiver - receiver = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="email_notifications") - triggered_by = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="triggered_emails") + receiver = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="email_notifications", + ) + triggered_by = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.CASCADE, + related_name="triggered_emails", + ) # entity - can be issues, pages, etc. 
entity_identifier = models.UUIDField(null=True) entity_name = models.CharField(max_length=255) diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py index b9317472433..db5ebf33bf6 100644 --- a/apiserver/plane/db/models/project.py +++ b/apiserver/plane/db/models/project.py @@ -107,12 +107,14 @@ class Project(BaseModel): close_in = models.IntegerField( default=0, validators=[MinValueValidator(0), MaxValueValidator(12)] ) + logo_props = models.JSONField(default=dict) default_state = models.ForeignKey( "db.State", on_delete=models.SET_NULL, null=True, related_name="default_state", ) + archived_at = models.DateTimeField(null=True) def __str__(self): """Return name of the project""" diff --git a/apiserver/plane/db/models/social_connection.py b/apiserver/plane/db/models/social_connection.py index 938a73a627a..73028e4198c 100644 --- a/apiserver/plane/db/models/social_connection.py +++ b/apiserver/plane/db/models/social_connection.py @@ -10,7 +10,7 @@ class SocialLoginConnection(BaseModel): medium = models.CharField( max_length=20, - choices=(("Google", "google"), ("Github", "github")), + choices=(("Google", "google"), ("Github", "github"), ("Jira", "jira")), default=None, ) last_login_at = models.DateTimeField(default=timezone.now, null=True) diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py index 0377ccb8be0..5f932d2ea93 100644 --- a/apiserver/plane/db/models/user.py +++ b/apiserver/plane/db/models/user.py @@ -1,16 +1,17 @@ # Python imports -import uuid -import string import random -import pytz +import string +import uuid -# Django imports -from django.db import models +import pytz from django.contrib.auth.models import ( AbstractBaseUser, - UserManager, PermissionsMixin, + UserManager, ) + +# Django imports +from django.db import models from django.db.models.signals import post_save from django.dispatch import receiver from django.utils import timezone @@ -138,13 +139,13 @@ def save(self, *args, 
**kwargs): super(User, self).save(*args, **kwargs) - @receiver(post_save, sender=User) def create_user_notification(sender, instance, created, **kwargs): # create preferences if created and not instance.is_bot: # Module imports from plane.db.models import UserNotificationPreference + UserNotificationPreference.objects.create( user=instance, property_change=False, diff --git a/apiserver/plane/license/api/views/instance.py b/apiserver/plane/license/api/views/instance.py index 112c68bc89a..627904a16d9 100644 --- a/apiserver/plane/license/api/views/instance.py +++ b/apiserver/plane/license/api/views/instance.py @@ -1,17 +1,11 @@ # Python imports -import json -import os -import requests import uuid -import random -import string # Django imports from django.utils import timezone from django.contrib.auth.hashers import make_password from django.core.validators import validate_email from django.core.exceptions import ValidationError -from django.conf import settings # Third party imports from rest_framework import status @@ -30,9 +24,9 @@ from plane.license.api.permissions import ( InstanceAdminPermission, ) -from plane.db.models import User, WorkspaceMember, ProjectMember +from plane.db.models import User from plane.license.utils.encryption import encrypt_data - +from plane.utils.cache import cache_response, invalidate_cache class InstanceEndpoint(BaseAPIView): def get_permissions(self): @@ -44,6 +38,7 @@ def get_permissions(self): AllowAny(), ] + @cache_response(60 * 60 * 2, user=False) def get(self, request): instance = Instance.objects.first() # get the instance @@ -58,6 +53,7 @@ def get(self, request): data["is_activated"] = True return Response(data, status=status.HTTP_200_OK) + @invalidate_cache(path="/api/instances/", user=False) def patch(self, request): # Get the instance instance = Instance.objects.first() @@ -75,6 +71,7 @@ class InstanceAdminEndpoint(BaseAPIView): InstanceAdminPermission, ] + @invalidate_cache(path="/api/instances/", user=False) # Create an 
instance admin def post(self, request): email = request.data.get("email", False) @@ -104,6 +101,7 @@ def post(self, request): serializer = InstanceAdminSerializer(instance_admin) return Response(serializer.data, status=status.HTTP_201_CREATED) + @cache_response(60 * 60 * 2) def get(self, request): instance = Instance.objects.first() if instance is None: @@ -115,11 +113,10 @@ def get(self, request): serializer = InstanceAdminSerializer(instance_admins, many=True) return Response(serializer.data, status=status.HTTP_200_OK) + @invalidate_cache(path="/api/instances/", user=False) def delete(self, request, pk): instance = Instance.objects.first() - instance_admin = InstanceAdmin.objects.filter( - instance=instance, pk=pk - ).delete() + InstanceAdmin.objects.filter(instance=instance, pk=pk).delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -128,6 +125,7 @@ class InstanceConfigurationEndpoint(BaseAPIView): InstanceAdminPermission, ] + @cache_response(60 * 60 * 2, user=False) def get(self, request): instance_configurations = InstanceConfiguration.objects.all() serializer = InstanceConfigurationSerializer( @@ -135,6 +133,8 @@ def get(self, request): ) return Response(serializer.data, status=status.HTTP_200_OK) + @invalidate_cache(path="/api/configs/", user=False) + @invalidate_cache(path="/api/mobile-configs/", user=False) def patch(self, request): configurations = InstanceConfiguration.objects.filter( key__in=request.data.keys() @@ -170,6 +170,7 @@ class InstanceAdminSignInEndpoint(BaseAPIView): AllowAny, ] + @invalidate_cache(path="/api/instances/", user=False) def post(self, request): # Check instance first instance = Instance.objects.first() @@ -201,7 +202,7 @@ def post(self, request): email = email.strip().lower() try: validate_email(email) - except ValidationError as e: + except ValidationError: return Response( {"error": "Please provide a valid email address."}, status=status.HTTP_400_BAD_REQUEST, @@ -260,6 +261,7 @@ class 
SignUpScreenVisitedEndpoint(BaseAPIView): AllowAny, ] + @invalidate_cache(path="/api/instances/", user=False) def post(self, request): instance = Instance.objects.first() if instance is None: diff --git a/apiserver/plane/license/management/commands/configure_instance.py b/apiserver/plane/license/management/commands/configure_instance.py index 99c309db326..cf8e37c73c3 100644 --- a/apiserver/plane/license/management/commands/configure_instance.py +++ b/apiserver/plane/license/management/commands/configure_instance.py @@ -3,7 +3,6 @@ # Django imports from django.core.management.base import BaseCommand -from django.conf import settings # Module imports from plane.license.models import InstanceConfiguration diff --git a/apiserver/plane/license/management/commands/register_instance.py b/apiserver/plane/license/management/commands/register_instance.py index 889cd46dc12..32a37879ff1 100644 --- a/apiserver/plane/license/management/commands/register_instance.py +++ b/apiserver/plane/license/management/commands/register_instance.py @@ -1,6 +1,5 @@ # Python imports import json -import requests import secrets # Django imports @@ -56,9 +55,9 @@ def handle(self, *args, **options): user_count=payload.get("user_count", 0), ) - self.stdout.write(self.style.SUCCESS(f"Instance registered")) + self.stdout.write(self.style.SUCCESS("Instance registered")) else: self.stdout.write( - self.style.SUCCESS(f"Instance already registered") + self.style.SUCCESS("Instance already registered") ) return diff --git a/apiserver/plane/license/utils/instance_value.py b/apiserver/plane/license/utils/instance_value.py index bc4fd5d21f2..4c191fedac1 100644 --- a/apiserver/plane/license/utils/instance_value.py +++ b/apiserver/plane/license/utils/instance_value.py @@ -64,6 +64,10 @@ def get_email_configuration(): "key": "EMAIL_USE_TLS", "default": os.environ.get("EMAIL_USE_TLS", "1"), }, + { + "key": "EMAIL_USE_SSL", + "default": os.environ.get("EMAIL_USE_SSL", "0"), + }, { "key": "EMAIL_FROM", "default": 
os.environ.get( diff --git a/apiserver/plane/middleware/api_log_middleware.py b/apiserver/plane/middleware/api_log_middleware.py index a49d43b55a1..96c62c2fd9d 100644 --- a/apiserver/plane/middleware/api_log_middleware.py +++ b/apiserver/plane/middleware/api_log_middleware.py @@ -1,4 +1,4 @@ -from plane.db.models import APIToken, APIActivityLog +from plane.db.models import APIActivityLog class APITokenLogMiddleware: @@ -39,6 +39,5 @@ def process_request(self, request, response, request_body): except Exception as e: print(e) # If the token does not exist, you can decide whether to log this as an invalid attempt - pass return None diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index 5c8947e73be..886ad4cb434 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -3,19 +3,20 @@ # Python imports import os import ssl -import certifi from datetime import timedelta from urllib.parse import urlparse -# Django imports -from django.core.management.utils import get_random_secret_key +import certifi # Third party imports import dj_database_url import sentry_sdk + +# Django imports +from django.core.management.utils import get_random_secret_key +from sentry_sdk.integrations.celery import CeleryIntegration from sentry_sdk.integrations.django import DjangoIntegration from sentry_sdk.integrations.redis import RedisIntegration -from sentry_sdk.integrations.celery import CeleryIntegration BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @@ -23,7 +24,7 @@ SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key()) # SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = False +DEBUG = int(os.environ.get("DEBUG", "0")) # Allowed Hosts ALLOWED_HOSTS = ["*"] diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py index 8f27d423418..b00684eae32 100644 --- a/apiserver/plane/settings/local.py +++ b/apiserver/plane/settings/local.py @@ -1,11 +1,14 @@ """Development settings""" + +import os + from .common import * # noqa DEBUG = True # Debug Toolbar settings -INSTALLED_APPS += ("debug_toolbar",) -MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) +INSTALLED_APPS += ("debug_toolbar",) # noqa +MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) # noqa DEBUG_TOOLBAR_PATCH_SETTINGS = False @@ -14,14 +17,18 @@ CACHES = { "default": { - "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": REDIS_URL, # noqa + "OPTIONS": { + "CLIENT_CLASS": "django_redis.client.DefaultClient", + }, } } INTERNAL_IPS = ("127.0.0.1",) MEDIA_URL = "/uploads/" -MEDIA_ROOT = os.path.join(BASE_DIR, "uploads") +MEDIA_ROOT = os.path.join(BASE_DIR, "uploads") # noqa CORS_ALLOWED_ORIGINS = [ "http://localhost:3000", @@ -29,3 +36,38 @@ "http://localhost:4000", "http://127.0.0.1:4000", ] + +LOG_DIR = os.path.join(BASE_DIR, "logs") # noqa + +if not os.path.exists(LOG_DIR): + os.makedirs(LOG_DIR) + +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", + "style": "{", + }, + }, + "handlers": { + "console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "verbose", + }, + }, + "loggers": { + "django.request": { + "handlers": ["console"], + "level": "DEBUG", + "propagate": False, + }, + "plane": { + "handlers": ["console"], + "level": "DEBUG", + "propagate": False, + }, + }, +} diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py index 90eb04dd59d..caf6804a303 
100644 --- a/apiserver/plane/settings/production.py +++ b/apiserver/plane/settings/production.py @@ -1,13 +1,16 @@ """Production settings""" + +import os + from .common import * # noqa # SECURITY WARNING: don't run with debug turned on in production! DEBUG = int(os.environ.get("DEBUG", 0)) == 1 - +DEBUG = True # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") -INSTALLED_APPS += ("scout_apm.django",) +INSTALLED_APPS += ("scout_apm.django",) # noqa # Honor the 'X-Forwarded-Proto' header for request.is_secure() SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") @@ -16,3 +19,62 @@ SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False) SCOUT_KEY = os.environ.get("SCOUT_KEY", "") SCOUT_NAME = "Plane" + +LOG_DIR = os.path.join(BASE_DIR, "logs") # noqa + +if not os.path.exists(LOG_DIR): + os.makedirs(LOG_DIR) + + +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", + "style": "{", + }, + "json": { + "()": "pythonjsonlogger.jsonlogger.JsonFormatter", + "fmt": "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s", + }, + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "verbose", + "level": "INFO", + }, + "file": { + "class": "plane.utils.logging.SizedTimedRotatingFileHandler", + "filename": ( + os.path.join(BASE_DIR, "logs", "plane-debug.log") # noqa + if DEBUG + else os.path.join(BASE_DIR, "logs", "plane-error.log") # noqa + ), + "when": "s", + "maxBytes": 1024 * 1024 * 1, + "interval": 1, + "backupCount": 5, + "formatter": "json", + "level": "DEBUG" if DEBUG else "ERROR", + }, + }, + "loggers": { + "django": { + "handlers": ["console", "file"], + "level": "INFO", + "propagate": True, + }, + "django.request": { + "handlers": ["console", "file"], + "level": "INFO", + "propagate": False, + }, + "plane": { + "level": "DEBUG" if 
DEBUG else "ERROR", + "handlers": ["console", "file"], + "propagate": False, + }, + }, +} diff --git a/apiserver/plane/settings/redis.py b/apiserver/plane/settings/redis.py index 5b09a127719..628a3d8e63b 100644 --- a/apiserver/plane/settings/redis.py +++ b/apiserver/plane/settings/redis.py @@ -1,4 +1,3 @@ -import os import redis from django.conf import settings from urllib.parse import urlparse diff --git a/apiserver/plane/settings/test.py b/apiserver/plane/settings/test.py index 1e2a5514424..a86b044a359 100644 --- a/apiserver/plane/settings/test.py +++ b/apiserver/plane/settings/test.py @@ -1,4 +1,5 @@ """Test Settings""" + from .common import * # noqa DEBUG = True @@ -6,6 +7,6 @@ # Send it in a dummy outbox EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" -INSTALLED_APPS.append( +INSTALLED_APPS.append( # noqa "plane.tests", ) diff --git a/apiserver/plane/space/views/base.py b/apiserver/plane/space/views/base.py index b75f3dd1844..023f27bbc10 100644 --- a/apiserver/plane/space/views/base.py +++ b/apiserver/plane/space/views/base.py @@ -1,26 +1,25 @@ # Python imports import zoneinfo +from django.conf import settings +from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django.db import IntegrityError # Django imports from django.urls import resolve -from django.conf import settings from django.utils import timezone -from django.db import IntegrityError -from django.core.exceptions import ObjectDoesNotExist, ValidationError +from django_filters.rest_framework import DjangoFilterBackend # Third part imports from rest_framework import status -from rest_framework import status -from rest_framework.viewsets import ModelViewSet -from rest_framework.response import Response from rest_framework.exceptions import APIException -from rest_framework.views import APIView from rest_framework.filters import SearchFilter from rest_framework.permissions import IsAuthenticated -from sentry_sdk import capture_exception -from 
django_filters.rest_framework import DjangoFilterBackend +from rest_framework.response import Response +from rest_framework.views import APIView +from rest_framework.viewsets import ModelViewSet # Module imports +from plane.utils.exception_logger import log_exception from plane.utils.paginator import BasePaginator @@ -58,7 +57,7 @@ def get_queryset(self): try: return self.model.objects.all() except Exception as e: - capture_exception(e) + log_exception(e) raise APIException( "Please check the view", status.HTTP_400_BAD_REQUEST ) @@ -85,23 +84,19 @@ def handle_exception(self, exc): ) if isinstance(e, ObjectDoesNotExist): - model_name = str(exc).split(" matching query does not exist.")[ - 0 - ] return Response( - {"error": f"The required object does not exist."}, + {"error": "The required object does not exist."}, status=status.HTTP_404_NOT_FOUND, ) if isinstance(e, KeyError): - capture_exception(e) + log_exception(e) return Response( {"error": "The required key does not exist."}, status=status.HTTP_400_BAD_REQUEST, ) - print(e) if settings.DEBUG else print("Server Error") - capture_exception(e) + log_exception(e) return Response( {"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR, @@ -179,7 +174,7 @@ def handle_exception(self, exc): if isinstance(e, ObjectDoesNotExist): return Response( - {"error": f"The required object does not exist."}, + {"error": "The required object does not exist."}, status=status.HTTP_404_NOT_FOUND, ) @@ -189,9 +184,7 @@ def handle_exception(self, exc): status=status.HTTP_400_BAD_REQUEST, ) - if settings.DEBUG: - print(e) - capture_exception(e) + log_exception(e) return Response( {"error": "Something went wrong please try again later"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR, diff --git a/apiserver/plane/space/views/inbox.py b/apiserver/plane/space/views/inbox.py index 2bf8f8303fa..9f681c16098 100644 --- a/apiserver/plane/space/views/inbox.py +++ b/apiserver/plane/space/views/inbox.py 
@@ -134,7 +134,7 @@ def create(self, request, slug, project_id, inbox_id): ) # Check for valid priority - if not request.data.get("issue", {}).get("priority", "none") in [ + if request.data.get("issue", {}).get("priority", "none") not in [ "low", "medium", "high", diff --git a/apiserver/plane/space/views/issue.py b/apiserver/plane/space/views/issue.py index 8f7fc0eaa0a..8c4d6e1501f 100644 --- a/apiserver/plane/space/views/issue.py +++ b/apiserver/plane/space/views/issue.py @@ -9,7 +9,6 @@ Func, F, Q, - Count, Case, Value, CharField, @@ -514,9 +513,13 @@ class ProjectIssuesPublicEndpoint(BaseAPIView): ] def get(self, request, slug, project_id): - project_deploy_board = ProjectDeployBoard.objects.get( + if not ProjectDeployBoard.objects.filter( workspace__slug=slug, project_id=project_id - ) + ).exists(): + return Response( + {"error": "Project is not published"}, + status=status.HTTP_404_NOT_FOUND, + ) filters = issue_filters(request.query_params, "GET") diff --git a/apiserver/plane/space/views/project.py b/apiserver/plane/space/views/project.py index 8cd3f55c5c3..10a3c387910 100644 --- a/apiserver/plane/space/views/project.py +++ b/apiserver/plane/space/views/project.py @@ -12,7 +12,6 @@ # Module imports from .base import BaseAPIView from plane.app.serializers import ProjectDeployBoardSerializer -from plane.app.permissions import ProjectMemberPermission from plane.db.models import ( Project, ProjectDeployBoard, diff --git a/apiserver/plane/urls.py b/apiserver/plane/urls.py index 669f3ea73de..3b042ea1fa1 100644 --- a/apiserver/plane/urls.py +++ b/apiserver/plane/urls.py @@ -7,6 +7,7 @@ from django.conf import settings +handler404 = "plane.app.views.error_404.custom_404_view" urlpatterns = [ path("", TemplateView.as_view(template_name="index.html")), diff --git a/apiserver/plane/utils/cache.py b/apiserver/plane/utils/cache.py new file mode 100644 index 00000000000..aece1d644fa --- /dev/null +++ b/apiserver/plane/utils/cache.py @@ -0,0 +1,88 @@ +# Python imports +from 
functools import wraps + +# Django imports +from django.conf import settings +from django.core.cache import cache + +# Third party imports +from rest_framework.response import Response + + +def generate_cache_key(custom_path, auth_header=None): + """Generate a cache key with the given params""" + if auth_header: + key_data = f"{custom_path}:{auth_header}" + else: + key_data = custom_path + return key_data + + +def cache_response(timeout=60 * 60, path=None, user=True): + """decorator to create cache per user""" + + def decorator(view_func): + @wraps(view_func) + def _wrapped_view(instance, request, *args, **kwargs): + # Function to generate cache key + auth_header = ( + None + if request.user.is_anonymous + else str(request.user.id) if user else None + ) + custom_path = path if path is not None else request.get_full_path() + key = generate_cache_key(custom_path, auth_header) + cached_result = cache.get(key) + if cached_result is not None: + return Response( + cached_result["data"], status=cached_result["status"] + ) + response = view_func(instance, request, *args, **kwargs) + + if response.status_code == 200 and not settings.DEBUG: + cache.set( + key, + {"data": response.data, "status": response.status_code}, + timeout, + ) + + return response + + return _wrapped_view + + return decorator + + +def invalidate_cache(path=None, url_params=False, user=True): + """invalidate cache per user""" + + def decorator(view_func): + @wraps(view_func) + def _wrapped_view(instance, request, *args, **kwargs): + # Invalidate cache before executing the view function + if url_params: + path_with_values = path + for key, value in kwargs.items(): + path_with_values = path_with_values.replace( + f":{key}", str(value) + ) + + custom_path = path_with_values + else: + custom_path = ( + path if path is not None else request.get_full_path() + ) + + auth_header = ( + None + if request.user.is_anonymous + else str(request.user.id) if user else None + ) + key = generate_cache_key(custom_path, 
auth_header) + cache.delete(key) + # Execute the view function + return view_func(instance, request, *args, **kwargs) + + return _wrapped_view + + return decorator diff --git a/apiserver/plane/utils/exception_logger.py b/apiserver/plane/utils/exception_logger.py new file mode 100644 index 00000000000..f7bb50de236 --- /dev/null +++ b/apiserver/plane/utils/exception_logger.py @@ -0,0 +1,15 @@ +# Python imports +import logging + +# Third party imports +from sentry_sdk import capture_exception + + +def log_exception(e): + # Log the error + logger = logging.getLogger("plane") + logger.error(e) + + # Capture in sentry if configured + capture_exception(e) + return diff --git a/apiserver/plane/utils/importers/__init__.py b/apiserver/plane/utils/importers/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/apiserver/plane/utils/importers/jira.py b/apiserver/plane/utils/importers/jira.py deleted file mode 100644 index 6f3a7c21783..00000000000 --- a/apiserver/plane/utils/importers/jira.py +++ /dev/null @@ -1,117 +0,0 @@ -import requests -import re -from requests.auth import HTTPBasicAuth -from sentry_sdk import capture_exception -from urllib.parse import urlparse, urljoin - - -def is_allowed_hostname(hostname): - allowed_domains = [ - "atl-paas.net", - "atlassian.com", - "atlassian.net", - "jira.com", - ] - parsed_uri = urlparse(f"https://{hostname}") - domain = parsed_uri.netloc.split(":")[0] # Ensures no port is included - base_domain = ".".join(domain.split(".")[-2:]) - return base_domain in allowed_domains - - -def is_valid_project_key(project_key): - if project_key: - project_key = project_key.strip().upper() - # Adjust the regular expression as needed based on your specific requirements. 
- if len(project_key) > 30: - return False - # Check the validity of the key as well - pattern = re.compile(r"^[A-Z0-9]{1,10}$") - return pattern.match(project_key) is not None - else: - False - - -def generate_valid_project_key(project_key): - return project_key.strip().upper() - - -def generate_url(hostname, path): - if not is_allowed_hostname(hostname): - raise ValueError("Invalid or unauthorized hostname") - return urljoin(f"https://{hostname}", path) - - -def jira_project_issue_summary(email, api_token, project_key, hostname): - try: - if not is_allowed_hostname(hostname): - return {"error": "Invalid or unauthorized hostname"} - - if not is_valid_project_key(project_key): - return {"error": "Invalid project key"} - - auth = HTTPBasicAuth(email, api_token) - headers = {"Accept": "application/json"} - - # make the project key upper case - project_key = generate_valid_project_key(project_key) - - # issues - issue_url = generate_url( - hostname, - f"/rest/api/3/search?jql=project={project_key} AND issuetype!=Epic", - ) - issue_response = requests.request( - "GET", issue_url, headers=headers, auth=auth - ).json()["total"] - - # modules - module_url = generate_url( - hostname, - f"/rest/api/3/search?jql=project={project_key} AND issuetype=Epic", - ) - module_response = requests.request( - "GET", module_url, headers=headers, auth=auth - ).json()["total"] - - # status - status_url = generate_url( - hostname, f"/rest/api/3/project/${project_key}/statuses" - ) - status_response = requests.request( - "GET", status_url, headers=headers, auth=auth - ).json() - - # labels - labels_url = generate_url( - hostname, f"/rest/api/3/label/?jql=project={project_key}" - ) - labels_response = requests.request( - "GET", labels_url, headers=headers, auth=auth - ).json()["total"] - - # users - users_url = generate_url( - hostname, f"/rest/api/3/users/search?jql=project={project_key}" - ) - users_response = requests.request( - "GET", users_url, headers=headers, auth=auth - ).json() - - 
return { - "issues": issue_response, - "modules": module_response, - "labels": labels_response, - "states": len(status_response), - "users": ( - [ - user - for user in users_response - if user.get("accountType") == "atlassian" - ] - ), - } - except Exception as e: - capture_exception(e) - return { - "error": "Something went wrong could not fetch information from jira" - } diff --git a/apiserver/plane/utils/integrations/__init__.py b/apiserver/plane/utils/integrations/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/apiserver/plane/utils/integrations/github.py b/apiserver/plane/utils/integrations/github.py deleted file mode 100644 index 5a7ce2aa29f..00000000000 --- a/apiserver/plane/utils/integrations/github.py +++ /dev/null @@ -1,154 +0,0 @@ -import os -import jwt -import requests -from urllib.parse import urlparse, parse_qs -from datetime import datetime, timedelta -from cryptography.hazmat.primitives.serialization import load_pem_private_key -from cryptography.hazmat.backends import default_backend -from django.conf import settings - - -def get_jwt_token(): - app_id = os.environ.get("GITHUB_APP_ID", "") - secret = bytes( - os.environ.get("GITHUB_APP_PRIVATE_KEY", ""), encoding="utf8" - ) - current_timestamp = int(datetime.now().timestamp()) - due_date = datetime.now() + timedelta(minutes=10) - expiry = int(due_date.timestamp()) - payload = { - "iss": app_id, - "sub": app_id, - "exp": expiry, - "iat": current_timestamp, - "aud": "https://github.com/login/oauth/access_token", - } - - priv_rsakey = load_pem_private_key(secret, None, default_backend()) - token = jwt.encode(payload, priv_rsakey, algorithm="RS256") - return token - - -def get_github_metadata(installation_id): - token = get_jwt_token() - - url = f"https://api.github.com/app/installations/{installation_id}" - headers = { - "Authorization": "Bearer " + str(token), - "Accept": "application/vnd.github+json", - } - response = requests.get(url, headers=headers).json() - return 
response - - -def get_github_repos(access_tokens_url, repositories_url): - token = get_jwt_token() - - headers = { - "Authorization": "Bearer " + str(token), - "Accept": "application/vnd.github+json", - } - - oauth_response = requests.post( - access_tokens_url, - headers=headers, - ).json() - - oauth_token = oauth_response.get("token", "") - headers = { - "Authorization": "Bearer " + str(oauth_token), - "Accept": "application/vnd.github+json", - } - response = requests.get( - repositories_url, - headers=headers, - ).json() - return response - - -def delete_github_installation(installation_id): - token = get_jwt_token() - - url = f"https://api.github.com/app/installations/{installation_id}" - headers = { - "Authorization": "Bearer " + str(token), - "Accept": "application/vnd.github+json", - } - response = requests.delete(url, headers=headers) - return response - - -def get_github_repo_details(access_tokens_url, owner, repo): - token = get_jwt_token() - - headers = { - "Authorization": "Bearer " + str(token), - "Accept": "application/vnd.github+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - oauth_response = requests.post( - access_tokens_url, - headers=headers, - ).json() - - oauth_token = oauth_response.get("token") - headers = { - "Authorization": "Bearer " + oauth_token, - "Accept": "application/vnd.github+json", - } - open_issues = requests.get( - f"https://api.github.com/repos/{owner}/{repo}", - headers=headers, - ).json()["open_issues_count"] - - total_labels = 0 - - labels_response = requests.get( - f"https://api.github.com/repos/{owner}/{repo}/labels?per_page=100&page=1", - headers=headers, - ) - - # Check if there are more pages - if len(labels_response.links.keys()): - # get the query parameter of last - last_url = labels_response.links.get("last").get("url") - parsed_url = urlparse(last_url) - last_page_value = parse_qs(parsed_url.query)["page"][0] - total_labels = total_labels + 100 * (int(last_page_value) - 1) - - # Get labels in last page - 
last_page_labels = requests.get(last_url, headers=headers).json() - total_labels = total_labels + len(last_page_labels) - else: - total_labels = len(labels_response.json()) - - # Currently only supporting upto 100 collaborators - # TODO: Update this function to fetch all collaborators - collaborators = requests.get( - f"https://api.github.com/repos/{owner}/{repo}/collaborators?per_page=100&page=1", - headers=headers, - ).json() - - return open_issues, total_labels, collaborators - - -def get_release_notes(): - token = settings.GITHUB_ACCESS_TOKEN - - if token: - headers = { - "Authorization": "Bearer " + str(token), - "Accept": "application/vnd.github.v3+json", - } - else: - headers = { - "Accept": "application/vnd.github.v3+json", - } - url = "https://api.github.com/repos/makeplane/plane/releases?per_page=5&page=1" - response = requests.get(url, headers=headers) - - if response.status_code != 200: - return {"error": "Unable to render information from Github Repository"} - - return response.json() diff --git a/apiserver/plane/utils/integrations/slack.py b/apiserver/plane/utils/integrations/slack.py deleted file mode 100644 index 0cc5b93b27e..00000000000 --- a/apiserver/plane/utils/integrations/slack.py +++ /dev/null @@ -1,21 +0,0 @@ -import os -import requests - - -def slack_oauth(code): - SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False) - SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False) - SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False) - - # Oauth Slack - if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET: - response = requests.get( - SLACK_OAUTH_URL, - params={ - "code": code, - "client_id": SLACK_CLIENT_ID, - "client_secret": SLACK_CLIENT_SECRET, - }, - ) - return response.json() - return {} diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py index 87284ff24e0..2c4cbd47174 100644 --- a/apiserver/plane/utils/issue_filters.py +++ b/apiserver/plane/utils/issue_filters.py @@ 
-463,7 +463,7 @@ def filter_start_target_date_issues(params, filter, method): filter["target_date__isnull"] = False filter["start_date__isnull"] = False return filter - + def issue_filters(query_params, method): filter = {} diff --git a/apiserver/plane/utils/issue_search.py b/apiserver/plane/utils/issue_search.py index 3b6dea332ec..74d1e80194a 100644 --- a/apiserver/plane/utils/issue_search.py +++ b/apiserver/plane/utils/issue_search.py @@ -5,7 +5,6 @@ from django.db.models import Q # Module imports -from plane.db.models import Issue def search_issues(query, queryset): diff --git a/apiserver/plane/utils/logging.py b/apiserver/plane/utils/logging.py new file mode 100644 index 00000000000..8021689e993 --- /dev/null +++ b/apiserver/plane/utils/logging.py @@ -0,0 +1,46 @@ +import logging.handlers as handlers +import time + + +class SizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler): + """ + Handler for logging to a set of files, which switches from one file + to the next when the current file reaches a certain size, or at certain + timed intervals + """ + + def __init__( + self, + filename, + maxBytes=0, + backupCount=0, + encoding=None, + delay=0, + when="h", + interval=1, + utc=False, + ): + handlers.TimedRotatingFileHandler.__init__( + self, filename, when, interval, backupCount, encoding, delay, utc + ) + self.maxBytes = maxBytes + + def shouldRollover(self, record): + """ + Determine if rollover should occur. + + Basically, see if the supplied record would cause the file to exceed + the size limit we have. + """ + if self.stream is None: # delay was set... + self.stream = self._open() + if self.maxBytes > 0: # are we rolling over? 
+ msg = "%s\n" % self.format(record) + # due to non-posix-compliant Windows feature + self.stream.seek(0, 2) + if self.stream.tell() + len(msg) >= self.maxBytes: + return 1 + t = int(time.time()) + if t >= self.rolloverAt: + return 1 + return 0 diff --git a/apiserver/plane/utils/paginator.py b/apiserver/plane/utils/paginator.py index 6b2b49c15f0..db0ede6ad76 100644 --- a/apiserver/plane/utils/paginator.py +++ b/apiserver/plane/utils/paginator.py @@ -193,7 +193,7 @@ def paginate( cursor_result = paginator.get_result( limit=per_page, cursor=input_cursor ) - except BadPaginationError as e: + except BadPaginationError: raise ParseError(detail="Error in parsing") # Serialize result according to the on_result function diff --git a/apiserver/plane/web/views.py b/apiserver/plane/web/views.py index 91ea44a218f..60f00ef0ef3 100644 --- a/apiserver/plane/web/views.py +++ b/apiserver/plane/web/views.py @@ -1,3 +1 @@ -from django.shortcuts import render - # Create your views here. diff --git a/apiserver/pyproject.toml b/apiserver/pyproject.toml index 773d6090e47..a6c07b85563 100644 --- a/apiserver/pyproject.toml +++ b/apiserver/pyproject.toml @@ -16,3 +16,10 @@ exclude = ''' | venv )/ ''' + +[tool.ruff] +line-length = 79 +exclude = [ + "**/__init__.py", +] + diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index eb0f542012b..2b7d383ba10 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -1,6 +1,6 @@ # base requirements -Django==4.2.10 +Django==4.2.11 psycopg==3.1.12 djangorestframework==3.14.0 redis==4.6.0 @@ -27,6 +27,7 @@ psycopg-binary==3.1.12 psycopg-c==3.1.12 scout-apm==2.26.1 openpyxl==3.1.2 +python-json-logger==2.0.7 beautifulsoup4==4.12.2 dj-database-url==2.1.0 posthog==3.0.2 diff --git a/apiserver/templates/emails/invitations/project_invitation.html b/apiserver/templates/emails/invitations/project_invitation.html index 630a5eab3b0..def576601a9 100644 --- 
a/apiserver/templates/emails/invitations/project_invitation.html +++ b/apiserver/templates/emails/invitations/project_invitation.html @@ -1,349 +1,1815 @@ - - - - - - - {{ first_name }} invited you to join {{ project_name }} on Plane - - - - - - - - + + + + - - - - - - + + + + + + diff --git a/deploy/1-click/README.md b/deploy/1-click/README.md deleted file mode 100644 index 88ea66c4c8e..00000000000 --- a/deploy/1-click/README.md +++ /dev/null @@ -1,78 +0,0 @@ -# 1-Click Self-Hosting - -In this guide, we will walk you through the process of setting up a 1-click self-hosted environment. Self-hosting allows you to have full control over your applications and data. It's a great way to ensure privacy, control, and customization. - -Let's get started! - -## Installing Plane - -Installing Plane is a very easy and minimal step process. - -### Prerequisite - -- Operating System (latest): Debian / Ubuntu / Centos -- Supported CPU Architechture: AMD64 / ARM64 / x86_64 / aarch64 - -### Downloading Latest Stable Release - -``` -curl -fsSL https://raw.githubusercontent.com/makeplane/plane/master/deploy/1-click/install.sh | sh - - -``` - -
- Downloading Preview Release - -``` -export BRANCH=preview - -curl -fsSL https://raw.githubusercontent.com/makeplane/plane/preview/deploy/1-click/install.sh | sh - - -``` - -NOTE: `Preview` builds do not support ARM64/AARCH64 CPU architecture -
- --- - - -Expect this after a successful install - -![Install Output](images/install.png) - -Access the application on a browser via http://server-ip-address - ---- - -### Get Control of your Plane Server Setup - -Plane App is available via the command `plane-app`. Running the command `plane-app --help` helps you to manage Plane - -![Plane Help](images/help.png) - -Basic Operations: -1. Start Server using `plane-app start` -1. Stop Server using `plane-app stop` -1. Restart Server using `plane-app restart` - -Advanced Operations: -1. Configure Plane using `plane-app --configure`. This will give you options to modify - - NGINX Port (default 80) - - Domain Name (default is the local server public IP address) - - File Upload Size (default 5MB) - - External Postgres DB Url (optional - default empty) - - External Redis URL (optional - default empty) - - AWS S3 Bucket (optional - to be configured only in case the user wants to use an S3 Bucket) - -1. Upgrade Plane using `plane-app --upgrade`. This will get the latest stable version of Plane files (docker-compose.yaml, .env, and docker images) - -1. Updating Plane App installer using `plane-app --update-installer` will update the `plane-app` utility. - -1. Uninstall Plane using `plane-app --uninstall`. This will uninstall the Plane application from the server and all docker containers but do not remove the data stored in Postgres, Redis, and Minio. - -1. Plane App can be reinstalled using `plane-app --install`. - -Application Data is stored in the mentioned folders: -1. DB Data: /opt/plane/data/postgres -1. Redis Data: /opt/plane/data/redis -1. 
Minio Data: /opt/plane/data/minio \ No newline at end of file diff --git a/deploy/1-click/images/help.png b/deploy/1-click/images/help.png deleted file mode 100644 index c14603a4b1e..00000000000 Binary files a/deploy/1-click/images/help.png and /dev/null differ diff --git a/deploy/1-click/images/install.png b/deploy/1-click/images/install.png deleted file mode 100644 index c8ba1e5f829..00000000000 Binary files a/deploy/1-click/images/install.png and /dev/null differ diff --git a/deploy/1-click/install.sh b/deploy/1-click/install.sh deleted file mode 100644 index f789b74e0cb..00000000000 --- a/deploy/1-click/install.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/bash - -export GIT_REPO=makeplane/plane - -# Check if the user has sudo access -if command -v curl &> /dev/null; then - sudo curl -sSL \ - -o /usr/local/bin/plane-app \ - https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s) -else - sudo wget -q \ - -O /usr/local/bin/plane-app \ - https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s) -fi - -sudo chmod +x /usr/local/bin/plane-app -sudo sed -i 's@export DEPLOY_BRANCH=${BRANCH:-master}@export DEPLOY_BRANCH='${BRANCH:-master}'@' /usr/local/bin/plane-app -sudo sed -i 's@CODE_REPO=${GIT_REPO:-torbenraab/plane}@CODE_REPO='$GIT_REPO'@' /usr/local/bin/plane-app - -plane-app -i #--help diff --git a/deploy/1-click/plane-app b/deploy/1-click/plane-app deleted file mode 100644 index be3718c9267..00000000000 --- a/deploy/1-click/plane-app +++ /dev/null @@ -1,803 +0,0 @@ -#!/bin/bash - -function print_header() { -clear - -cat <<"EOF" ---------------------------------------- - ____ _ -| _ \| | __ _ _ __ ___ -| |_) | |/ _` | '_ \ / _ \ -| __/| | (_| | | | | __/ -|_| |_|\__,_|_| |_|\___| - ---------------------------------------- -Project management tool from the future ---------------------------------------- - -EOF -} -function update_env_file() { - config_file=$1 - key=$2 - 
value=$3 - - # Check if the config file exists - if [ ! -f "$config_file" ]; then - echo "Config file not found. Creating a new one..." >&2 - sudo touch "$config_file" - fi - - # Check if the key already exists in the config file - if sudo grep "^$key=" "$config_file"; then - sudo awk -v key="$key" -v value="$value" -F '=' '{if ($1 == key) $2 = value} 1' OFS='=' "$config_file" | sudo tee "$config_file.tmp" > /dev/null - sudo mv "$config_file.tmp" "$config_file" &> /dev/null - else - # sudo echo "$key=$value" >> "$config_file" - echo -e "$key=$value" | sudo tee -a "$config_file" > /dev/null - fi -} -function read_env_file() { - config_file=$1 - key=$2 - - # Check if the config file exists - if [ ! -f "$config_file" ]; then - echo "Config file not found. Creating a new one..." >&2 - sudo touch "$config_file" - fi - - # Check if the key already exists in the config file - if sudo grep -q "^$key=" "$config_file"; then - value=$(sudo awk -v key="$key" -F '=' '{if ($1 == key) print $2}' "$config_file") - echo "$value" - else - echo "" - fi -} -function update_config() { - config_file="$PLANE_INSTALL_DIR/config.env" - update_env_file $config_file $1 $2 -} -function read_config() { - config_file="$PLANE_INSTALL_DIR/config.env" - read_env_file $config_file $1 -} -function update_env() { - config_file="$PLANE_INSTALL_DIR/.env" - update_env_file $config_file $1 $2 -} -function read_env() { - config_file="$PLANE_INSTALL_DIR/.env" - read_env_file $config_file $1 -} -function show_message() { - print_header - - if [ "$2" == "replace_last_line" ]; then - PROGRESS_MSG[-1]="$1" - else - PROGRESS_MSG+=("$1") - fi - - for statement in "${PROGRESS_MSG[@]}"; do - echo "$statement" - done - -} -function prepare_environment() { - show_message "Prepare Environment..." 
>&2 - - show_message "- Updating OS with required tools ✋" >&2 - sudo "$PACKAGE_MANAGER" update -y - # sudo "$PACKAGE_MANAGER" upgrade -y - - local required_tools=("curl" "awk" "wget" "nano" "dialog" "git" "uidmap" "jq") - - for tool in "${required_tools[@]}"; do - if ! command -v $tool &> /dev/null; then - sudo "$PACKAGE_MANAGER" install -y $tool - fi - done - - show_message "- OS Updated ✅" "replace_last_line" >&2 - - # Install Docker if not installed - if ! command -v docker &> /dev/null; then - show_message "- Installing Docker ✋" >&2 - # curl -o- https://get.docker.com | bash - - - if [ "$PACKAGE_MANAGER" == "yum" ]; then - sudo $PACKAGE_MANAGER install -y yum-utils - sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo &> /dev/null - elif [ "$PACKAGE_MANAGER" == "apt-get" ]; then - # Add Docker's official GPG key: - sudo $PACKAGE_MANAGER update - sudo $PACKAGE_MANAGER install ca-certificates curl &> /dev/null - sudo install -m 0755 -d /etc/apt/keyrings &> /dev/null - sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc &> /dev/null - sudo chmod a+r /etc/apt/keyrings/docker.asc &> /dev/null - - # Add the repository to Apt sources: - echo \ - "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ - $(. 
/etc/os-release && echo "$VERSION_CODENAME") stable" | \ - sudo tee /etc/apt/sources.list.d/docker.list > /dev/null - - sudo $PACKAGE_MANAGER update - fi - - sudo $PACKAGE_MANAGER install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin -y - - show_message "- Docker Installed ✅" "replace_last_line" >&2 - else - show_message "- Docker is already installed ✅" >&2 - fi - - update_config "PLANE_ARCH" "$CPU_ARCH" - update_config "DOCKER_VERSION" "$(docker -v | awk '{print $3}' | sed 's/,//g')" - update_config "PLANE_DATA_DIR" "$DATA_DIR" - update_config "PLANE_LOG_DIR" "$LOG_DIR" - - # echo "TRUE" - echo "Environment prepared successfully ✅" - show_message "Environment prepared successfully ✅" >&2 - show_message "" >&2 - return 0 -} -function download_plane() { - # Download Docker Compose File from github url - show_message "Downloading Plane Setup Files ✋" >&2 - sudo curl -H 'Cache-Control: no-cache, no-store' \ - -s -o $PLANE_INSTALL_DIR/docker-compose.yaml \ - https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/docker-compose.yml?token=$(date +%s) - - sudo curl -H 'Cache-Control: no-cache, no-store' \ - -s -o $PLANE_INSTALL_DIR/variables-upgrade.env \ - https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/variables.env?token=$(date +%s) - - # if .env does not exists rename variables-upgrade.env to .env - if [ ! 
-f "$PLANE_INSTALL_DIR/.env" ]; then - sudo mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env - fi - - show_message "Plane Setup Files Downloaded ✅" "replace_last_line" >&2 - show_message "" >&2 - - echo "PLANE_DOWNLOADED" - return 0 -} -function printUsageInstructions() { - show_message "" >&2 - show_message "----------------------------------" >&2 - show_message "Usage Instructions" >&2 - show_message "----------------------------------" >&2 - show_message "" >&2 - show_message "To use the Plane Setup utility, use below commands" >&2 - show_message "" >&2 - - show_message "Usage: plane-app [OPTION]" >&2 - show_message "" >&2 - show_message " start Start Server" >&2 - show_message " stop Stop Server" >&2 - show_message " restart Restart Server" >&2 - show_message "" >&2 - show_message "other options" >&2 - show_message " -i, --install Install Plane" >&2 - show_message " -c, --configure Configure Plane" >&2 - show_message " -up, --upgrade Upgrade Plane" >&2 - show_message " -un, --uninstall Uninstall Plane" >&2 - show_message " -ui, --update-installer Update Plane Installer" >&2 - show_message " -h, --help Show help" >&2 - show_message "" >&2 - show_message "" >&2 - show_message "Application Data is stored in mentioned folders" >&2 - show_message " - DB Data: $DATA_DIR/postgres" >&2 - show_message " - Redis Data: $DATA_DIR/redis" >&2 - show_message " - Minio Data: $DATA_DIR/minio" >&2 - show_message "" >&2 - show_message "" >&2 - show_message "----------------------------------" >&2 - show_message "" >&2 -} -function build_local_image() { - show_message "- Downloading Plane Source Code ✋" >&2 - REPO=https://github.com/$CODE_REPO.git - CURR_DIR=$PWD - PLANE_TEMP_CODE_DIR=$PLANE_INSTALL_DIR/temp - sudo rm -rf $PLANE_TEMP_CODE_DIR > /dev/null - - sudo git clone $REPO $PLANE_TEMP_CODE_DIR --branch $DEPLOY_BRANCH --single-branch -q > /dev/null - - sudo cp $PLANE_TEMP_CODE_DIR/deploy/selfhost/build.yml $PLANE_TEMP_CODE_DIR/build.yml - - show_message "- 
Plane Source Code Downloaded ✅" "replace_last_line" >&2 - - show_message "- Building Docker Images ✋" >&2 - sudo docker compose --env-file=$PLANE_INSTALL_DIR/.env -f $PLANE_TEMP_CODE_DIR/build.yml build --no-cache -} -function check_for_docker_images() { - show_message "" >&2 - # show_message "Building Plane Images" >&2 - - CURR_DIR=$(pwd) - - if [ "$DEPLOY_BRANCH" == "master" ]; then - update_env "APP_RELEASE" "latest" - export APP_RELEASE=latest - else - update_env "APP_RELEASE" "$DEPLOY_BRANCH" - export APP_RELEASE=$DEPLOY_BRANCH - fi - - if [ $USE_GLOBAL_IMAGES == 1 ]; then - # show_message "Building Plane Images for $CPU_ARCH is not required. Skipping... ✅" "replace_last_line" >&2 - export DOCKERHUB_USER=ghcr.io/torbenraab/plane - update_env "DOCKERHUB_USER" "$DOCKERHUB_USER" - update_env "PULL_POLICY" "always" - echo "Building Plane Images for $CPU_ARCH is not required. Skipping..." - else - export DOCKERHUB_USER=myplane - show_message "Building Plane Images for $CPU_ARCH " >&2 - update_env "DOCKERHUB_USER" "$DOCKERHUB_USER" - update_env "PULL_POLICY" "never" - - build_local_image - - sudo rm -rf $PLANE_INSTALL_DIR/temp > /dev/null - - show_message "- Docker Images Built ✅" "replace_last_line" >&2 - sudo cd $CURR_DIR - fi - - sudo sed -i "s|- pgdata:|- $DATA_DIR/postgres:|g" $PLANE_INSTALL_DIR/docker-compose.yaml - sudo sed -i "s|- redisdata:|- $DATA_DIR/redis:|g" $PLANE_INSTALL_DIR/docker-compose.yaml - sudo sed -i "s|- uploads:|- $DATA_DIR/minio:|g" $PLANE_INSTALL_DIR/docker-compose.yaml - - show_message "Downloading Plane Images for $CPU_ARCH ✋" >&2 - sudo docker compose -f $PLANE_INSTALL_DIR/docker-compose.yaml --env-file=$PLANE_INSTALL_DIR/.env pull - show_message "Plane Images Downloaded ✅" "replace_last_line" >&2 -} -function configure_plane() { - show_message "" >&2 - show_message "Configuring Plane" >&2 - show_message "" >&2 - - exec 3>&1 - - nginx_port=$(read_env "NGINX_PORT") - domain_name=$(read_env "DOMAIN_NAME") - upload_limit=$(read_env 
"FILE_SIZE_LIMIT") - - NGINX_SETTINGS=$(dialog \ - --ok-label "Next" \ - --cancel-label "Skip" \ - --backtitle "Plane Configuration" \ - --title "Nginx Settings" \ - --form "" \ - 0 0 0 \ - "Port:" 1 1 "${nginx_port:-80}" 1 10 50 0 \ - "Domain:" 2 1 "${domain_name:-localhost}" 2 10 50 0 \ - "Upload Limit:" 3 1 "${upload_limit:-5242880}" 3 10 15 0 \ - 2>&1 1>&3) - - save_nginx_settings=0 - if [ $? -eq 0 ]; then - save_nginx_settings=1 - nginx_port=$(echo "$NGINX_SETTINGS" | sed -n 1p) - domain_name=$(echo "$NGINX_SETTINGS" | sed -n 2p) - upload_limit=$(echo "$NGINX_SETTINGS" | sed -n 3p) - fi - - - # smtp_host=$(read_env "EMAIL_HOST") - # smtp_user=$(read_env "EMAIL_HOST_USER") - # smtp_password=$(read_env "EMAIL_HOST_PASSWORD") - # smtp_port=$(read_env "EMAIL_PORT") - # smtp_from=$(read_env "EMAIL_FROM") - # smtp_tls=$(read_env "EMAIL_USE_TLS") - # smtp_ssl=$(read_env "EMAIL_USE_SSL") - - # SMTP_SETTINGS=$(dialog \ - # --ok-label "Next" \ - # --cancel-label "Skip" \ - # --backtitle "Plane Configuration" \ - # --title "SMTP Settings" \ - # --form "" \ - # 0 0 0 \ - # "Host:" 1 1 "$smtp_host" 1 10 80 0 \ - # "User:" 2 1 "$smtp_user" 2 10 80 0 \ - # "Password:" 3 1 "$smtp_password" 3 10 80 0 \ - # "Port:" 4 1 "${smtp_port:-587}" 4 10 5 0 \ - # "From:" 5 1 "${smtp_from:-Mailer }" 5 10 80 0 \ - # "TLS:" 6 1 "${smtp_tls:-1}" 6 10 1 1 \ - # "SSL:" 7 1 "${smtp_ssl:-0}" 7 10 1 1 \ - # 2>&1 1>&3) - - # save_smtp_settings=0 - # if [ $? -eq 0 ]; then - # save_smtp_settings=1 - # smtp_host=$(echo "$SMTP_SETTINGS" | sed -n 1p) - # smtp_user=$(echo "$SMTP_SETTINGS" | sed -n 2p) - # smtp_password=$(echo "$SMTP_SETTINGS" | sed -n 3p) - # smtp_port=$(echo "$SMTP_SETTINGS" | sed -n 4p) - # smtp_from=$(echo "$SMTP_SETTINGS" | sed -n 5p) - # smtp_tls=$(echo "$SMTP_SETTINGS" | sed -n 6p) - # fi - external_pgdb_url=$(dialog \ - --backtitle "Plane Configuration" \ - --title "Using External Postgres Database ?" 
\ - --ok-label "Next" \ - --cancel-label "Skip" \ - --inputbox "Enter your external database url" \ - 8 60 3>&1 1>&2 2>&3) - - - external_redis_url=$(dialog \ - --backtitle "Plane Configuration" \ - --title "Using External Redis Database ?" \ - --ok-label "Next" \ - --cancel-label "Skip" \ - --inputbox "Enter your external redis url" \ - 8 60 3>&1 1>&2 2>&3) - - - aws_region=$(read_env "AWS_REGION") - aws_access_key=$(read_env "AWS_ACCESS_KEY_ID") - aws_secret_key=$(read_env "AWS_SECRET_ACCESS_KEY") - aws_bucket=$(read_env "AWS_S3_BUCKET_NAME") - - - AWS_S3_SETTINGS=$(dialog \ - --ok-label "Next" \ - --cancel-label "Skip" \ - --backtitle "Plane Configuration" \ - --title "AWS S3 Bucket Configuration" \ - --form "" \ - 0 0 0 \ - "Region:" 1 1 "$aws_region" 1 10 50 0 \ - "Access Key:" 2 1 "$aws_access_key" 2 10 50 0 \ - "Secret Key:" 3 1 "$aws_secret_key" 3 10 50 0 \ - "Bucket:" 4 1 "$aws_bucket" 4 10 50 0 \ - 2>&1 1>&3) - - save_aws_settings=0 - if [ $? -eq 0 ]; then - save_aws_settings=1 - aws_region=$(echo "$AWS_S3_SETTINGS" | sed -n 1p) - aws_access_key=$(echo "$AWS_S3_SETTINGS" | sed -n 2p) - aws_secret_key=$(echo "$AWS_S3_SETTINGS" | sed -n 3p) - aws_bucket=$(echo "$AWS_S3_SETTINGS" | sed -n 4p) - fi - - # display dialogbox asking for confirmation to continue - CONFIRM_CONFIG=$(dialog \ - --title "Confirm Configuration" \ - --backtitle "Plane Configuration" \ - --yes-label "Confirm" \ - --no-label "Cancel" \ - --yesno \ - " - save_ngnix_settings: $save_nginx_settings - nginx_port: $nginx_port - domain_name: $domain_name - upload_limit: $upload_limit - - save_aws_settings: $save_aws_settings - aws_region: $aws_region - aws_access_key: $aws_access_key - aws_secret_key: $aws_secret_key - aws_bucket: $aws_bucket - - pdgb_url: $external_pgdb_url - redis_url: $external_redis_url - " \ - 0 0 3>&1 1>&2 2>&3) - - if [ $? 
-eq 0 ]; then - if [ $save_nginx_settings == 1 ]; then - update_env "NGINX_PORT" "$nginx_port" - update_env "DOMAIN_NAME" "$domain_name" - update_env "WEB_URL" "http://$domain_name" - update_env "CORS_ALLOWED_ORIGINS" "http://$domain_name" - update_env "FILE_SIZE_LIMIT" "$upload_limit" - fi - - # check enable smpt settings value - # if [ $save_smtp_settings == 1 ]; then - # update_env "EMAIL_HOST" "$smtp_host" - # update_env "EMAIL_HOST_USER" "$smtp_user" - # update_env "EMAIL_HOST_PASSWORD" "$smtp_password" - # update_env "EMAIL_PORT" "$smtp_port" - # update_env "EMAIL_FROM" "$smtp_from" - # update_env "EMAIL_USE_TLS" "$smtp_tls" - # update_env "EMAIL_USE_SSL" "$smtp_ssl" - # fi - - # check enable aws settings value - if [[ $save_aws_settings == 1 && $aws_access_key != "" && $aws_secret_key != "" ]] ; then - update_env "USE_MINIO" "0" - update_env "AWS_REGION" "$aws_region" - update_env "AWS_ACCESS_KEY_ID" "$aws_access_key" - update_env "AWS_SECRET_ACCESS_KEY" "$aws_secret_key" - update_env "AWS_S3_BUCKET_NAME" "$aws_bucket" - elif [[ -z $aws_access_key || -z $aws_secret_key ]] ; then - update_env "USE_MINIO" "1" - update_env "AWS_REGION" "" - update_env "AWS_ACCESS_KEY_ID" "" - update_env "AWS_SECRET_ACCESS_KEY" "" - update_env "AWS_S3_BUCKET_NAME" "uploads" - fi - - if [ "$external_pgdb_url" != "" ]; then - update_env "DATABASE_URL" "$external_pgdb_url" - fi - if [ "$external_redis_url" != "" ]; then - update_env "REDIS_URL" "$external_redis_url" - fi - fi - - exec 3>&- -} -function upgrade_configuration() { - upg_env_file="$PLANE_INSTALL_DIR/variables-upgrade.env" - # Check if the file exists - if [ -f "$upg_env_file" ]; then - # Read each line from the file - while IFS= read -r line; do - # Skip comments and empty lines - if [[ "$line" =~ ^\s*#.*$ ]] || [[ -z "$line" ]]; then - continue - fi - - # Split the line into key and value - key=$(echo "$line" | cut -d'=' -f1) - value=$(echo "$line" | cut -d'=' -f2-) - - current_value=$(read_env "$key") - - if [ -z 
"$current_value" ]; then - update_env "$key" "$value" - fi - done < "$upg_env_file" - fi -} -function install() { - show_message "" - if [ "$(uname)" == "Linux" ]; then - OS="linux" - OS_NAME=$(sudo awk -F= '/^ID=/{print $2}' /etc/os-release) - OS_NAME=$(echo "$OS_NAME" | tr -d '"') - print_header - if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] || - [ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then - OS_SUPPORTED=true - show_message "******** Installing Plane ********" - show_message "" - - prepare_environment - - if [ $? -eq 0 ]; then - download_plane - if [ $? -eq 0 ]; then - # create_service - check_for_docker_images - - last_installed_on=$(read_config "INSTALLATION_DATE") - # if [ "$last_installed_on" == "" ]; then - # configure_plane - # fi - - update_env "NGINX_PORT" "80" - update_env "DOMAIN_NAME" "$MY_IP" - update_env "WEB_URL" "http://$MY_IP" - update_env "CORS_ALLOWED_ORIGINS" "http://$MY_IP" - - update_config "INSTALLATION_DATE" "$(date '+%Y-%m-%d')" - - if command -v crontab &> /dev/null; then - sudo touch /etc/cron.daily/makeplane - sudo chmod +x /etc/cron.daily/makeplane - sudo echo "0 2 * * * root /usr/local/bin/plane-app --upgrade" > /etc/cron.daily/makeplane - sudo crontab /etc/cron.daily/makeplane - fi - - show_message "Plane Installed Successfully ✅" - show_message "" - else - show_message "Download Failed ❌" - exit 1 - fi - else - show_message "Initialization Failed ❌" - exit 1 - fi - - else - OS_SUPPORTED=false - PROGRESS_MSG="❌❌ Unsupported OS Varient Detected : $OS_NAME ❌❌" - show_message "" - exit 1 - fi - else - PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌" - show_message "" - exit 1 - fi -} -function upgrade() { - print_header - if [ "$(uname)" == "Linux" ]; then - OS="linux" - OS_NAME=$(sudo awk -F= '/^ID=/{print $2}' /etc/os-release) - OS_NAME=$(echo "$OS_NAME" | tr -d '"') - if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] || - [ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; 
then - - OS_SUPPORTED=true - show_message "******** Upgrading Plane ********" - show_message "" - - prepare_environment - - if [ $? -eq 0 ]; then - stop_server - download_plane - if [ $? -eq 0 ]; then - check_for_docker_images - upgrade_configuration - update_config "UPGRADE_DATE" "$(date)" - - start_server - - show_message "" - show_message "Plane Upgraded Successfully ✅" - show_message "" - printUsageInstructions - else - show_message "Download Failed ❌" - exit 1 - fi - else - show_message "Initialization Failed ❌" - exit 1 - fi - else - PROGRESS_MSG="❌❌ Unsupported OS Varient Detected : $OS_NAME ❌❌" - show_message "" - exit 1 - fi - else - PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌" - show_message "" - exit 1 - fi -} -function uninstall() { - print_header - if [ "$(uname)" == "Linux" ]; then - OS="linux" - OS_NAME=$(awk -F= '/^ID=/{print $2}' /etc/os-release) - OS_NAME=$(echo "$OS_NAME" | tr -d '"') - if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] || - [ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then - - OS_SUPPORTED=true - show_message "******** Uninstalling Plane ********" - show_message "" - - stop_server - - if ! [ -x "$(command -v docker)" ]; then - echo "DOCKER_NOT_INSTALLED" &> /dev/null - else - # Ask of user input to confirm uninstall docker ? - CONFIRM_DOCKER_PURGE=$(dialog --title "Uninstall Docker" --defaultno --yesno "Are you sure you want to uninstall docker ?" 8 60 3>&1 1>&2 2>&3) - if [ $? 
-eq 0 ]; then - show_message "- Uninstalling Docker ✋" - sudo docker images -q | xargs -r sudo docker rmi -f &> /dev/null - sudo "$PACKAGE_MANAGER" remove -y docker-engine docker docker.io docker-ce docker-ce-cli docker-compose-plugin &> /dev/null - sudo "$PACKAGE_MANAGER" autoremove -y docker-engine docker docker.io docker-ce docker-compose-plugin &> /dev/null - show_message "- Docker Uninstalled ✅" "replace_last_line" >&2 - fi - fi - - sudo rm $PLANE_INSTALL_DIR/.env &> /dev/null - sudo rm $PLANE_INSTALL_DIR/variables-upgrade.env &> /dev/null - sudo rm $PLANE_INSTALL_DIR/config.env &> /dev/null - sudo rm $PLANE_INSTALL_DIR/docker-compose.yaml &> /dev/null - - if command -v crontab &> /dev/null; then - sudo crontab -r &> /dev/null - sudo rm /etc/cron.daily/makeplane &> /dev/null - fi - - # rm -rf $PLANE_INSTALL_DIR &> /dev/null - show_message "- Configuration Cleaned ✅" - - show_message "" - show_message "******** Plane Uninstalled ********" - show_message "" - show_message "" - show_message "Plane Configuration Cleaned with some exceptions" - show_message "- DB Data: $DATA_DIR/postgres" - show_message "- Redis Data: $DATA_DIR/redis" - show_message "- Minio Data: $DATA_DIR/minio" - show_message "" - show_message "" - show_message "Thank you for using Plane. We hope to see you again soon." - show_message "" - show_message "" - else - PROGRESS_MSG="❌❌ Unsupported OS Varient Detected : $OS_NAME ❌❌" - show_message "" - exit 1 - fi - else - PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌" - show_message "" - exit 1 - fi -} -function start_server() { - docker_compose_file="$PLANE_INSTALL_DIR/docker-compose.yaml" - env_file="$PLANE_INSTALL_DIR/.env" - # check if both the files exits - if [ -f "$docker_compose_file" ] && [ -f "$env_file" ]; then - show_message "Starting Plane Server ($APP_RELEASE) ✋" - sudo docker compose -f $docker_compose_file --env-file=$env_file up -d - - # Wait for containers to be running - echo "Waiting for containers to start..." 
- while ! sudo docker compose -f "$docker_compose_file" --env-file="$env_file" ps --services --filter "status=running" --quiet | grep -q "."; do - sleep 1 - done - # wait for migrator container to exit with status 0 before starting the application - migrator_container_id=$(sudo docker container ls -aq -f "name=plane-migrator") - - # if migrator container is running, wait for it to exit - if [ -n "$migrator_container_id" ]; then - while sudo docker inspect --format='{{.State.Status}}' $migrator_container_id | grep -q "running"; do - show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (Migrator in progress)" "replace_last_line" >&2 - sleep 1 - done - fi - - # if migrator exit status is not 0, show error message and exit - if [ -n "$migrator_container_id" ]; then - migrator_exit_code=$(sudo docker inspect --format='{{.State.ExitCode}}' $migrator_container_id) - if [ $migrator_exit_code -ne 0 ]; then - # show_message "Migrator failed with exit code $migrator_exit_code ❌" "replace_last_line" >&2 - show_message "Plane Server failed to start ❌" "replace_last_line" >&2 - stop_server - exit 1 - fi - fi - - api_container_id=$(sudo docker container ls -q -f "name=plane-api") - while ! sudo docker logs $api_container_id 2>&1 | grep -i "Application startup complete"; - do - show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (API starting)" "replace_last_line" >&2 - sleep 1 - done - show_message "Plane Server Started ($APP_RELEASE) ✅" "replace_last_line" >&2 - show_message "---------------------------------------------------------------" >&2 - show_message "Access the Plane application at http://$MY_IP" >&2 - show_message "---------------------------------------------------------------" >&2 - - else - show_message "Plane Server not installed. 
Please install Plane first ❌" "replace_last_line" >&2 - fi -} -function stop_server() { - docker_compose_file="$PLANE_INSTALL_DIR/docker-compose.yaml" - env_file="$PLANE_INSTALL_DIR/.env" - # check if both the files exits - if [ -f "$docker_compose_file" ] && [ -f "$env_file" ]; then - show_message "Stopping Plane Server ($APP_RELEASE) ✋" - sudo docker compose -f $docker_compose_file --env-file=$env_file down - show_message "Plane Server Stopped ($APP_RELEASE) ✅" "replace_last_line" >&2 - else - show_message "Plane Server not installed [Skipping] ✅" "replace_last_line" >&2 - fi -} -function restart_server() { - docker_compose_file="$PLANE_INSTALL_DIR/docker-compose.yaml" - env_file="$PLANE_INSTALL_DIR/.env" - # check if both the files exits - if [ -f "$docker_compose_file" ] && [ -f "$env_file" ]; then - show_message "Restarting Plane Server ($APP_RELEASE) ✋" - sudo docker compose -f $docker_compose_file --env-file=$env_file restart - show_message "Plane Server Restarted ($APP_RELEASE) ✅" "replace_last_line" >&2 - else - show_message "Plane Server not installed. 
Please install Plane first ❌" "replace_last_line" >&2 - fi -} -function show_help() { - # print_header - show_message "Usage: plane-app [OPTION]" >&2 - show_message "" >&2 - show_message " start Start Server" >&2 - show_message " stop Stop Server" >&2 - show_message " restart Restart Server" >&2 - show_message "" >&2 - show_message "other options" >&2 - show_message " -i, --install Install Plane" >&2 - show_message " -c, --configure Configure Plane" >&2 - show_message " -up, --upgrade Upgrade Plane" >&2 - show_message " -un, --uninstall Uninstall Plane" >&2 - show_message " -ui, --update-installer Update Plane Installer" >&2 - show_message " -h, --help Show help" >&2 - show_message "" >&2 - exit 1 - -} -function update_installer() { - show_message "Updating Plane Installer ✋" >&2 - sudo curl -H 'Cache-Control: no-cache, no-store' \ - -s -o /usr/local/bin/plane-app \ - https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/1-click/plane-app?token=$(date +%s) - - sudo chmod +x /usr/local/bin/plane-app > /dev/null&> /dev/null - show_message "Plane Installer Updated ✅" "replace_last_line" >&2 -} - -export DEPLOY_BRANCH=${BRANCH:-master} -export APP_RELEASE=$DEPLOY_BRANCH -export DOCKERHUB_USER=ghcr.io/torbenraab/plane -export PULL_POLICY=always - -if [ "$DEPLOY_BRANCH" == "master" ]; then - export APP_RELEASE=latest -fi - -PLANE_INSTALL_DIR=/opt/plane -DATA_DIR=$PLANE_INSTALL_DIR/data -LOG_DIR=$PLANE_INSTALL_DIR/logs -CODE_REPO=${GIT_REPO:-torbenraab/plane} -OS_SUPPORTED=false -CPU_ARCH=$(uname -m) -PROGRESS_MSG="" -USE_GLOBAL_IMAGES=0 -PACKAGE_MANAGER="" -MY_IP=$(curl -s ifconfig.me) - -if [[ $CPU_ARCH == "amd64" || $CPU_ARCH == "x86_64" || ( $DEPLOY_BRANCH == "master" && ( $CPU_ARCH == "arm64" || $CPU_ARCH == "aarch64" ) ) ]]; then - USE_GLOBAL_IMAGES=1 -fi - -sudo mkdir -p $PLANE_INSTALL_DIR/{data,log} - -if command -v apt-get &> /dev/null; then - PACKAGE_MANAGER="apt-get" -elif command -v yum &> /dev/null; then - PACKAGE_MANAGER="yum" -elif command -v 
apk &> /dev/null; then - PACKAGE_MANAGER="apk" -fi - -if [ "$1" == "start" ]; then - start_server -elif [ "$1" == "stop" ]; then - stop_server -elif [ "$1" == "restart" ]; then - restart_server -elif [ "$1" == "--install" ] || [ "$1" == "-i" ]; then - install - start_server - show_message "" >&2 - show_message "To view help, use plane-app --help " >&2 -elif [ "$1" == "--configure" ] || [ "$1" == "-c" ]; then - configure_plane - printUsageInstructions -elif [ "$1" == "--upgrade" ] || [ "$1" == "-up" ]; then - upgrade -elif [ "$1" == "--uninstall" ] || [ "$1" == "-un" ]; then - uninstall -elif [ "$1" == "--update-installer" ] || [ "$1" == "-ui" ]; then - update_installer -elif [ "$1" == "--help" ] || [ "$1" == "-h" ]; then - show_help -else - show_help -fi diff --git a/deploy/selfhost/docker-compose.yml b/deploy/selfhost/docker-compose.yml index dba3e4383cf..37ba1aec2f1 100644 --- a/deploy/selfhost/docker-compose.yml +++ b/deploy/selfhost/docker-compose.yml @@ -1,21 +1,15 @@ version: "3.8" -x-app-env : &app-env +x-app-env: &app-env environment: - NGINX_PORT=${NGINX_PORT:-80} - WEB_URL=${WEB_URL:-http://localhost} - DEBUG=${DEBUG:-0} - - DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-plane.settings.production} # deprecated - - NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL:-http://localhost/spaces} # deprecated - - SENTRY_DSN=${SENTRY_DSN:-""} + - SENTRY_DSN=${SENTRY_DSN} - SENTRY_ENVIRONMENT=${SENTRY_ENVIRONMENT:-"production"} - - GOOGLE_CLIENT_ID=${GOOGLE_CLIENT_ID:-""} - - GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID:-""} - - GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""} - - DOCKERIZED=${DOCKERIZED:-1} # deprecated - - CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS:-""} + - CORS_ALLOWED_ORIGINS=${CORS_ALLOWED_ORIGINS} # Gunicorn Workers - - GUNICORN_WORKERS=${GUNICORN_WORKERS:-2} + - GUNICORN_WORKERS=${GUNICORN_WORKERS:-1} #DB SETTINGS - PGHOST=${PGHOST:-plane-db} - PGDATABASE=${PGDATABASE:-plane} @@ -23,25 +17,11 @@ x-app-env : &app-env - 
POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-plane} - POSTGRES_DB=${POSTGRES_DB:-plane} - PGDATA=${PGDATA:-/var/lib/postgresql/data} - - DATABASE_URL=${DATABASE_URL:-postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE}} + - DATABASE_URL=${DATABASE_URL:-postgresql://plane:plane@plane-db/plane} # REDIS SETTINGS - REDIS_HOST=${REDIS_HOST:-plane-redis} - REDIS_PORT=${REDIS_PORT:-6379} - - REDIS_URL=${REDIS_URL:-redis://${REDIS_HOST}:6379/} - # EMAIL SETTINGS - Deprecated can be configured through admin panel - - EMAIL_HOST=${EMAIL_HOST:-""} - - EMAIL_HOST_USER=${EMAIL_HOST_USER:-""} - - EMAIL_HOST_PASSWORD=${EMAIL_HOST_PASSWORD:-""} - - EMAIL_PORT=${EMAIL_PORT:-587} - - EMAIL_FROM=${EMAIL_FROM:-"Team Plane "} - - EMAIL_USE_TLS=${EMAIL_USE_TLS:-1} - - EMAIL_USE_SSL=${EMAIL_USE_SSL:-0} - - DEFAULT_EMAIL=${DEFAULT_EMAIL:-captain@plane.so} - - DEFAULT_PASSWORD=${DEFAULT_PASSWORD:-password123} - # LOGIN/SIGNUP SETTINGS - Deprecated can be configured through admin panel - - ENABLE_SIGNUP=${ENABLE_SIGNUP:-1} - - ENABLE_EMAIL_PASSWORD=${ENABLE_EMAIL_PASSWORD:-1} - - ENABLE_MAGIC_LINK_LOGIN=${ENABLE_MAGIC_LINK_LOGIN:-0} + - REDIS_URL=${REDIS_URL:-redis://plane-redis:6379/} # OIDC SETTINGS - Can be configured through admin panel - OIDC_AUTO=${OIDC_AUTO:-0} - OIDC_CLIENT_ID=${OIDC_CLIENT_ID:-""} @@ -68,7 +48,7 @@ x-app-env : &app-env services: web: <<: *app-env - image: ${DOCKERHUB_USER:-makeplane}/plane-frontend:${APP_RELEASE:-latest} + image: ${DOCKERHUB_USER:-makeplane}/plane-frontend:${APP_RELEASE:-stable} pull_policy: ${PULL_POLICY:-always} restart: unless-stopped command: /usr/local/bin/start.sh web/server.js web @@ -80,7 +60,7 @@ services: space: <<: *app-env - image: ${DOCKERHUB_USER:-makeplane}/plane-space:${APP_RELEASE:-latest} + image: ${DOCKERHUB_USER:-makeplane}/plane-space:${APP_RELEASE:-stable} pull_policy: ${PULL_POLICY:-always} restart: unless-stopped command: /usr/local/bin/start.sh space/server.js space @@ -93,22 +73,26 @@ services: api: <<: 
*app-env - image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-latest} + image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable} pull_policy: ${PULL_POLICY:-always} restart: unless-stopped command: ./bin/takeoff deploy: replicas: ${API_REPLICAS:-1} + volumes: + - logs_api:/code/plane/logs depends_on: - plane-db - plane-redis worker: <<: *app-env - image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-latest} + image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable} pull_policy: ${PULL_POLICY:-always} restart: unless-stopped command: ./bin/worker + volumes: + - logs_worker:/code/plane/logs depends_on: - api - plane-db @@ -116,10 +100,12 @@ services: beat-worker: <<: *app-env - image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-latest} + image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable} pull_policy: ${PULL_POLICY:-always} restart: unless-stopped command: ./bin/beat + volumes: + - logs_beat-worker:/code/plane/logs depends_on: - api - plane-db @@ -127,19 +113,21 @@ services: migrator: <<: *app-env - image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-latest} + image: ${DOCKERHUB_USER:-makeplane}/plane-backend:${APP_RELEASE:-stable} pull_policy: ${PULL_POLICY:-always} restart: no command: > - sh -c "python manage.py wait_for_db && - python manage.py migrate" + sh -c "python manage.py wait_for_db && + python manage.py migrate" + volumes: + - logs_migrator:/code/plane/logs depends_on: - plane-db - plane-redis plane-db: <<: *app-env - image: postgres:15.2-alpine + image: postgres:15.5-alpine pull_policy: if_not_present restart: unless-stopped command: postgres -c 'max_connections=1000' @@ -147,7 +135,7 @@ services: - pgdata:/var/lib/postgresql/data plane-redis: <<: *app-env - image: redis:6.2.7-alpine + image: redis:7.2.4-alpine pull_policy: if_not_present restart: unless-stopped volumes: @@ -165,10 +153,10 @@ services: # Comment this if you already have a reverse 
proxy running proxy: <<: *app-env - image: ${DOCKERHUB_USER:-makeplane}/plane-proxy:${APP_RELEASE:-latest} + image: ${DOCKERHUB_USER:-makeplane}/plane-proxy:${APP_RELEASE:-stable} pull_policy: ${PULL_POLICY:-always} ports: - - ${NGINX_PORT}:80 + - ${NGINX_PORT}:80 depends_on: - web - api @@ -178,3 +166,7 @@ volumes: pgdata: redisdata: uploads: + logs_api: + logs_worker: + logs_beat-worker: + logs_migrator: diff --git a/deploy/selfhost/install.sh b/deploy/selfhost/install.sh index 1f15e6fb413..8715a4c50da 100755 --- a/deploy/selfhost/install.sh +++ b/deploy/selfhost/install.sh @@ -17,16 +17,16 @@ function print_header() { clear cat <<"EOF" ---------------------------------------- - ____ _ -| _ \| | __ _ _ __ ___ -| |_) | |/ _` | '_ \ / _ \ -| __/| | (_| | | | | __/ -|_| |_|\__,_|_| |_|\___| - ---------------------------------------- +-------------------------------------------- + ____ _ ///////// +| _ \| | __ _ _ __ ___ ///////// +| |_) | |/ _` | '_ \ / _ \ ///// ///// +| __/| | (_| | | | | __/ ///// ///// +|_| |_|\__,_|_| |_|\___| //// + //// +-------------------------------------------- Project management tool from the future ---------------------------------------- +-------------------------------------------- EOF } @@ -66,7 +66,7 @@ function buildLocalImage() { cd $PLANE_TEMP_CODE_DIR if [ "$BRANCH" == "master" ]; then - export APP_RELEASE=latest + export APP_RELEASE=stable fi docker compose -f build.yml build --no-cache >&2 @@ -99,17 +99,17 @@ function download() { curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/docker-compose.yaml https://raw.githubusercontent.com/torbenraab/plane/$BRANCH/deploy/selfhost/docker-compose.yml?$(date +%s) curl -H 'Cache-Control: no-cache, no-store' -s -o $PLANE_INSTALL_DIR/variables-upgrade.env https://raw.githubusercontent.com/torbenraab/plane/$BRANCH/deploy/selfhost/variables.env?$(date +%s) - if [ -f "$PLANE_INSTALL_DIR/.env" ]; + if [ -f "$DOCKER_ENV_PATH" ]; then - cp $PLANE_INSTALL_DIR/.env 
$PLANE_INSTALL_DIR/archive/$TS.env + cp $DOCKER_ENV_PATH $PLANE_INSTALL_DIR/archive/$TS.env else - mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env + mv $PLANE_INSTALL_DIR/variables-upgrade.env $DOCKER_ENV_PATH fi if [ "$BRANCH" != "master" ]; then cp $PLANE_INSTALL_DIR/docker-compose.yaml $PLANE_INSTALL_DIR/temp.yaml - sed -e 's@${APP_RELEASE:-latest}@'"$BRANCH"'@g' \ + sed -e 's@${APP_RELEASE:-stable}@'"$BRANCH"'@g' \ $PLANE_INSTALL_DIR/temp.yaml > $PLANE_INSTALL_DIR/docker-compose.yaml rm $PLANE_INSTALL_DIR/temp.yaml @@ -131,9 +131,9 @@ function download() { fi echo "" - echo "Latest version is now available for you to use" + echo "Most recent Stable version is now available for you to use" echo "" - echo "In case of Upgrade, your new setting file is availabe as 'variables-upgrade.env'. Please compare and set the required values in '.env 'file." + echo "In case of Upgrade, your new setting file is availabe as 'variables-upgrade.env'. Please compare and set the required values in 'plane.env 'file." echo "" } @@ -144,7 +144,7 @@ function startServices() { if [ -n "$migrator_container_id" ]; then local idx=0 while docker inspect --format='{{.State.Status}}' $migrator_container_id | grep -q "running"; do - local message=">>> Waiting for Data Migration to finish" + local message=">> Waiting for Data Migration to finish" local dots=$(printf '%*s' $idx | tr ' ' '.') echo -ne "\r$message$dots" ((idx++)) @@ -152,13 +152,18 @@ function startServices() { done fi printf "\r\033[K" + echo "" + echo " Data Migration completed successfully ✅" # if migrator exit status is not 0, show error message and exit if [ -n "$migrator_container_id" ]; then local migrator_exit_code=$(docker inspect --format='{{.State.ExitCode}}' $migrator_container_id) if [ $migrator_exit_code -ne 0 ]; then echo "Plane Server failed to start ❌" - stopServices + # stopServices + echo + echo "Please check the logs for the 'migrator' service and resolve the issue(s)." 
+ echo "Stop the services by running the command: ./setup.sh stop" exit 1 fi fi @@ -167,26 +172,35 @@ function startServices() { local idx2=0 while ! docker logs $api_container_id 2>&1 | grep -m 1 -i "Application startup complete" | grep -q "."; do - local message=">>> Waiting for API Service to Start" + local message=">> Waiting for API Service to Start" local dots=$(printf '%*s' $idx2 | tr ' ' '.') echo -ne "\r$message$dots" ((idx2++)) sleep 1 done printf "\r\033[K" + echo " API Service started successfully ✅" + source "${DOCKER_ENV_PATH}" + echo " Plane Server started successfully ✅" + echo "" + echo " You can access the application at $WEB_URL" + echo "" + } function stopServices() { docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH down } function restartServices() { - docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH restart + # docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH restart + stopServices + startServices } function upgrade() { echo "***** STOPPING SERVICES ****" stopServices echo - echo "***** DOWNLOADING LATEST VERSION ****" + echo "***** DOWNLOADING STABLE VERSION ****" download echo "***** PLEASE VALIDATE AND START SERVICES ****" @@ -303,15 +317,15 @@ function askForAction() { elif [ "$ACTION" == "2" ] || [ "$DEFAULT_ACTION" == "start" ] then startServices - askForAction + # askForAction elif [ "$ACTION" == "3" ] || [ "$DEFAULT_ACTION" == "stop" ] then stopServices - askForAction + # askForAction elif [ "$ACTION" == "4" ] || [ "$DEFAULT_ACTION" == "restart" ] then restartServices - askForAction + # askForAction elif [ "$ACTION" == "5" ] || [ "$DEFAULT_ACTION" == "upgrade" ] then upgrade @@ -343,7 +357,7 @@ fi if [ "$BRANCH" == "master" ]; then - export APP_RELEASE=latest + export APP_RELEASE=stable fi # REMOVE SPECIAL CHARACTERS FROM BRANCH NAME @@ -354,7 +368,21 @@ fi mkdir -p $PLANE_INSTALL_DIR/archive DOCKER_FILE_PATH=$PLANE_INSTALL_DIR/docker-compose.yaml -DOCKER_ENV_PATH=$PLANE_INSTALL_DIR/.env 
+DOCKER_ENV_PATH=$PLANE_INSTALL_DIR/plane.env + +# BACKWARD COMPATIBILITY +OLD_DOCKER_ENV_PATH=$PLANE_INSTALL_DIR/.env +if [ -f "$OLD_DOCKER_ENV_PATH" ]; +then + mv "$OLD_DOCKER_ENV_PATH" "$DOCKER_ENV_PATH" + OS_NAME=$(uname) + if [ "$OS_NAME" == "Darwin" ]; + then + sed -i '' -e 's@APP_RELEASE=latest@APP_RELEASE=stable@' "$DOCKER_ENV_PATH" + else + sed -i -e 's@APP_RELEASE=latest@APP_RELEASE=stable@' "$DOCKER_ENV_PATH" + fi +fi print_header askForAction $@ diff --git a/deploy/selfhost/variables.env b/deploy/selfhost/variables.env index 854204e2e89..1a137299f23 100644 --- a/deploy/selfhost/variables.env +++ b/deploy/selfhost/variables.env @@ -1,4 +1,4 @@ -APP_RELEASE=latest +APP_RELEASE=stable WEB_REPLICAS=1 SPACE_REPLICAS=1 @@ -7,13 +7,8 @@ API_REPLICAS=1 NGINX_PORT=80 WEB_URL=http://localhost DEBUG=0 -NEXT_PUBLIC_DEPLOY_URL=http://localhost/spaces SENTRY_DSN= SENTRY_ENVIRONMENT=production -GOOGLE_CLIENT_ID= -GITHUB_CLIENT_ID= -GITHUB_CLIENT_SECRET= -DOCKERIZED=1 # deprecated CORS_ALLOWED_ORIGINS=http://localhost #DB SETTINGS @@ -23,26 +18,14 @@ POSTGRES_USER=plane POSTGRES_PASSWORD=plane POSTGRES_DB=plane PGDATA=/var/lib/postgresql/data -DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${PGHOST}/${PGDATABASE} +DATABASE_URL= # REDIS SETTINGS REDIS_HOST=plane-redis REDIS_PORT=6379 -REDIS_URL=redis://${REDIS_HOST}:6379/ +REDIS_URL= -# EMAIL SETTINGS -EMAIL_HOST= -EMAIL_HOST_USER= -EMAIL_HOST_PASSWORD= -EMAIL_PORT=587 -EMAIL_FROM=Team Plane -EMAIL_USE_TLS=1 -EMAIL_USE_SSL=0 - -# LOGIN/SIGNUP SETTINGS -ENABLE_SIGNUP=1 -ENABLE_EMAIL_PASSWORD=1 -ENABLE_MAGIC_LINK_LOGIN=0 +# Secret Key SECRET_KEY=60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5 # Enable OpenID Connect Login - You can set the Discovery URL to get the Enpoints (URLs) automatically or set them manually # If you set the Endpoints manually the Discovery URL should be empty to avoid overriding the endpoints @@ -68,4 +51,4 @@ BUCKET_NAME=uploads FILE_SIZE_LIMIT=5242880 # Gunicorn Workers 
-GUNICORN_WORKERS=2 +GUNICORN_WORKERS=1 diff --git a/nginx/nginx.conf.template b/nginx/nginx.conf.template index 4775dcbfad4..780093b3ba0 100644 --- a/nginx/nginx.conf.template +++ b/nginx/nginx.conf.template @@ -30,7 +30,7 @@ http { } location /${BUCKET_NAME}/ { - proxy_pass http://plane-minio:9000/uploads/; + proxy_pass http://plane-minio:9000/${BUCKET_NAME}/; } } } diff --git a/package.json b/package.json index 9239a9b41fa..c87384f3947 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "repository": "https://github.com/makeplane/plane.git", - "version": "0.16.0", + "version": "0.17.0", "license": "AGPL-3.0", "private": true, "workspaces": [ diff --git a/packages/editor/core/package.json b/packages/editor/core/package.json index fcb6b57bbb2..a0d7351d302 100644 --- a/packages/editor/core/package.json +++ b/packages/editor/core/package.json @@ -1,6 +1,6 @@ { "name": "@plane/editor-core", - "version": "0.16.0", + "version": "0.17.0", "description": "Core Editor that powers Plane", "private": true, "main": "./dist/index.mjs", @@ -53,14 +53,13 @@ "react-moveable": "^0.54.2", "tailwind-merge": "^1.14.0", "tippy.js": "^6.3.7", - "tiptap-markdown": "^0.8.2" + "tiptap-markdown": "^0.8.9" }, "devDependencies": { "@types/node": "18.15.3", "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", - "eslint": "^7.32.0", - "eslint-config-next": "13.2.4", + "eslint-config-custom": "*", "postcss": "^8.4.29", "tailwind-config-custom": "*", "tsconfig": "*", diff --git a/packages/editor/core/src/helpers/insert-content-at-cursor-position.ts b/packages/editor/core/src/helpers/insert-content-at-cursor-position.ts new file mode 100644 index 00000000000..062acafcb1b --- /dev/null +++ b/packages/editor/core/src/helpers/insert-content-at-cursor-position.ts @@ -0,0 +1,17 @@ +import { Selection } from "@tiptap/pm/state"; +import { Editor } from "@tiptap/react"; +import { MutableRefObject } from "react"; + +export const insertContentAtSavedSelection = ( + editorRef: 
MutableRefObject, + content: string, + savedSelection: Selection +) => { + if (editorRef.current && savedSelection) { + editorRef.current + .chain() + .focus() + .insertContentAt(savedSelection?.anchor, content) + .run(); + } +}; diff --git a/packages/editor/core/src/hooks/use-editor.tsx b/packages/editor/core/src/hooks/use-editor.tsx index c2923c1e97d..7e6aa5912a0 100644 --- a/packages/editor/core/src/hooks/use-editor.tsx +++ b/packages/editor/core/src/hooks/use-editor.tsx @@ -1,5 +1,5 @@ import { useEditor as useCustomEditor, Editor } from "@tiptap/react"; -import { useImperativeHandle, useRef, MutableRefObject } from "react"; +import { useImperativeHandle, useRef, MutableRefObject, useState } from "react"; import { CoreEditorProps } from "src/ui/props"; import { CoreEditorExtensions } from "src/ui/extensions"; import { EditorProps } from "@tiptap/pm/view"; @@ -8,6 +8,8 @@ import { DeleteImage } from "src/types/delete-image"; import { IMentionSuggestion } from "src/types/mention-suggestion"; import { RestoreImage } from "src/types/restore-image"; import { UploadImage } from "src/types/upload-image"; +import { Selection } from "@tiptap/pm/state"; +import { insertContentAtSavedSelection } from "src/helpers/insert-content-at-cursor-position"; interface CustomEditorProps { uploadFile: UploadImage; @@ -70,8 +72,10 @@ export const useEditor = ({ onCreate: async ({ editor }) => { onStart?.(editor.getJSON(), getTrimmedHTML(editor.getHTML())); }, + onTransaction: async ({ editor }) => { + setSavedSelection(editor.state.selection); + }, onUpdate: async ({ editor }) => { - // for instant feedback loop setIsSubmitting?.("submitting"); setShouldShowAlert?.(true); onChange?.(editor.getJSON(), getTrimmedHTML(editor.getHTML())); @@ -83,6 +87,8 @@ export const useEditor = ({ const editorRef: MutableRefObject = useRef(null); editorRef.current = editor; + const [savedSelection, setSavedSelection] = useState(null); + useImperativeHandle(forwardedRef, () => ({ clearEditor: () => { 
editorRef.current?.commands.clearContent(); @@ -90,6 +96,11 @@ export const useEditor = ({ setEditorValue: (content: string) => { editorRef.current?.commands.setContent(content); }, + setEditorValueAtCursorPosition: (content: string) => { + if (savedSelection) { + insertContentAtSavedSelection(editorRef, content, savedSelection); + } + }, })); if (!editor) { diff --git a/packages/editor/core/src/lib/editor-commands.ts b/packages/editor/core/src/lib/editor-commands.ts index 6524d1ff58a..7c3e7f11e60 100644 --- a/packages/editor/core/src/lib/editor-commands.ts +++ b/packages/editor/core/src/lib/editor-commands.ts @@ -4,18 +4,18 @@ import { findTableAncestor } from "src/lib/utils"; import { UploadImage } from "src/types/upload-image"; export const toggleHeadingOne = (editor: Editor, range?: Range) => { - if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 1 }).run(); - else editor.chain().focus().toggleHeading({ level: 1 }).run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 1 }).run(); + else editor.chain().focus().clearNodes().toggleHeading({ level: 1 }).run(); }; export const toggleHeadingTwo = (editor: Editor, range?: Range) => { - if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 2 }).run(); - else editor.chain().focus().toggleHeading({ level: 2 }).run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 2 }).run(); + else editor.chain().focus().clearNodes().toggleHeading({ level: 2 }).run(); }; export const toggleHeadingThree = (editor: Editor, range?: Range) => { - if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 3 }).run(); - else editor.chain().focus().toggleHeading({ level: 3 }).run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 3 }).run(); + else editor.chain().focus().clearNodes().toggleHeading({ level: 3 }).run(); }; 
export const toggleBold = (editor: Editor, range?: Range) => { @@ -37,10 +37,10 @@ export const toggleCodeBlock = (editor: Editor, range?: Range) => { // Check if code block is active then toggle code block if (editor.isActive("codeBlock")) { if (range) { - editor.chain().focus().deleteRange(range).toggleCodeBlock().run(); + editor.chain().focus().deleteRange(range).clearNodes().toggleCodeBlock().run(); return; } - editor.chain().focus().toggleCodeBlock().run(); + editor.chain().focus().clearNodes().toggleCodeBlock().run(); return; } @@ -49,32 +49,32 @@ export const toggleCodeBlock = (editor: Editor, range?: Range) => { if (isSelectionEmpty) { if (range) { - editor.chain().focus().deleteRange(range).toggleCodeBlock().run(); + editor.chain().focus().deleteRange(range).clearNodes().toggleCodeBlock().run(); return; } - editor.chain().focus().toggleCodeBlock().run(); + editor.chain().focus().clearNodes().toggleCodeBlock().run(); } else { if (range) { - editor.chain().focus().deleteRange(range).toggleCode().run(); + editor.chain().focus().deleteRange(range).clearNodes().toggleCode().run(); return; } - editor.chain().focus().toggleCode().run(); + editor.chain().focus().clearNodes().toggleCode().run(); } }; export const toggleOrderedList = (editor: Editor, range?: Range) => { - if (range) editor.chain().focus().deleteRange(range).toggleOrderedList().run(); - else editor.chain().focus().toggleOrderedList().run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleOrderedList().run(); + else editor.chain().focus().clearNodes().toggleOrderedList().run(); }; export const toggleBulletList = (editor: Editor, range?: Range) => { - if (range) editor.chain().focus().deleteRange(range).toggleBulletList().run(); - else editor.chain().focus().toggleBulletList().run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleBulletList().run(); + else editor.chain().focus().clearNodes().toggleBulletList().run(); }; export const toggleTaskList = 
(editor: Editor, range?: Range) => { - if (range) editor.chain().focus().deleteRange(range).toggleTaskList().run(); - else editor.chain().focus().toggleTaskList().run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleTaskList().run(); + else editor.chain().focus().clearNodes().toggleTaskList().run(); }; export const toggleStrike = (editor: Editor, range?: Range) => { @@ -83,8 +83,8 @@ export const toggleStrike = (editor: Editor, range?: Range) => { }; export const toggleBlockquote = (editor: Editor, range?: Range) => { - if (range) editor.chain().focus().deleteRange(range).toggleBlockquote().run(); - else editor.chain().focus().toggleBlockquote().run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleBlockquote().run(); + else editor.chain().focus().clearNodes().toggleBlockquote().run(); }; export const insertTableCommand = (editor: Editor, range?: Range) => { @@ -97,8 +97,8 @@ export const insertTableCommand = (editor: Editor, range?: Range) => { } } } - if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3 }).run(); - else editor.chain().focus().insertTable({ rows: 3, cols: 3 }).run(); + if (range) editor.chain().focus().deleteRange(range).clearNodes().insertTable({ rows: 3, cols: 3 }).run(); + else editor.chain().focus().clearNodes().insertTable({ rows: 3, cols: 3 }).run(); }; export const unsetLinkEditor = (editor: Editor) => { diff --git a/packages/editor/core/src/lib/utils.ts b/packages/editor/core/src/lib/utils.ts index 5c7a8f08faa..c943d4c6048 100644 --- a/packages/editor/core/src/lib/utils.ts +++ b/packages/editor/core/src/lib/utils.ts @@ -1,3 +1,4 @@ +import { Selection } from "@tiptap/pm/state"; import { clsx, type ClassValue } from "clsx"; import { twMerge } from "tailwind-merge"; interface EditorClassNames { @@ -18,6 +19,19 @@ export function cn(...inputs: ClassValue[]) { return twMerge(clsx(inputs)); } +// Helper function to find the parent node of a specific type 
+export function findParentNodeOfType(selection: Selection, typeName: string) { + let depth = selection.$anchor.depth; + while (depth > 0) { + const node = selection.$anchor.node(depth); + if (node.type.name === typeName) { + return { node, pos: selection.$anchor.start(depth) - 1 }; + } + depth--; + } + return null; +} + export const findTableAncestor = (node: Node | null): HTMLTableElement | null => { while (node !== null && node.nodeName !== "TABLE") { node = node.parentNode; diff --git a/packages/editor/core/src/styles/table.css b/packages/editor/core/src/styles/table.css index ca384d34fc6..3ba17ee1b28 100644 --- a/packages/editor/core/src/styles/table.css +++ b/packages/editor/core/src/styles/table.css @@ -98,7 +98,7 @@ top: 0; bottom: -2px; width: 4px; - z-index: 99; + z-index: 5; background-color: #d9e4ff; pointer-events: none; } @@ -111,7 +111,7 @@ .tableWrapper .tableControls .rowsControl { transition: opacity ease-in 100ms; position: absolute; - z-index: 99; + z-index: 5; display: flex; justify-content: center; align-items: center; @@ -198,7 +198,7 @@ .tableWrapper .tableControls .tableToolbox .toolboxItem:hover, .tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem:hover { - background-color: rgba(var(--color-background-100), 0.5); + background-color: rgba(var(--color-background-80), 0.6); } .tableWrapper .tableControls .tableToolbox .toolboxItem .iconContainer, diff --git a/packages/editor/core/src/ui/components/editor-container.tsx b/packages/editor/core/src/ui/components/editor-container.tsx index 5480a51e931..2d608152500 100644 --- a/packages/editor/core/src/ui/components/editor-container.tsx +++ b/packages/editor/core/src/ui/components/editor-container.tsx @@ -1,5 +1,6 @@ import { Editor } from "@tiptap/react"; -import { ReactNode } from "react"; +import { FC, ReactNode } from "react"; +import { cn } from "src/lib/utils"; interface EditorContainerProps { editor: Editor | null; @@ -8,17 +9,54 @@ interface EditorContainerProps { 
hideDragHandle?: () => void; } -export const EditorContainer = ({ editor, editorClassNames, hideDragHandle, children }: EditorContainerProps) => ( -
{ - editor?.chain().focus(undefined, { scrollIntoView: false }).run(); - }} - onMouseLeave={() => { - hideDragHandle?.(); - }} - className={`cursor-text ${editorClassNames}`} - > - {children} -
-); +export const EditorContainer: FC = (props) => { + const { editor, editorClassNames, hideDragHandle, children } = props; + + const handleContainerClick = () => { + if (!editor) return; + if (!editor.isEditable) return; + if (editor.isFocused) return; // If editor is already focused, do nothing + + const { selection } = editor.state; + const currentNode = selection.$from.node(); + + editor?.chain().focus("end", { scrollIntoView: false }).run(); // Focus the editor at the end + + if ( + currentNode.content.size === 0 && // Check if the current node is empty + !( + editor.isActive("orderedList") || + editor.isActive("bulletList") || + editor.isActive("taskItem") || + editor.isActive("table") || + editor.isActive("blockquote") || + editor.isActive("codeBlock") + ) // Check if it's an empty node within an orderedList, bulletList, taskItem, table, quote or code block + ) { + return; + } + + // Insert a new paragraph at the end of the document + const endPosition = editor?.state.doc.content.size; + editor?.chain().insertContentAt(endPosition, { type: "paragraph" }).run(); + + // Focus the newly added paragraph for immediate editing + editor + .chain() + .setTextSelection(endPosition + 1) + .run(); + }; + + return ( +
{ + hideDragHandle?.(); + }} + className={cn(`cursor-text`, { "active-editor": editor?.isFocused && editor?.isEditable }, editorClassNames)} + > + {children} +
+ ); +}; diff --git a/packages/editor/core/src/ui/components/editor-content.tsx b/packages/editor/core/src/ui/components/editor-content.tsx index 9c09387882b..7a6ce30f770 100644 --- a/packages/editor/core/src/ui/components/editor-content.tsx +++ b/packages/editor/core/src/ui/components/editor-content.tsx @@ -1,17 +1,28 @@ import { Editor, EditorContent } from "@tiptap/react"; -import { ReactNode } from "react"; +import { FC, ReactNode } from "react"; import { ImageResizer } from "src/ui/extensions/image/image-resize"; interface EditorContentProps { editor: Editor | null; editorContentCustomClassNames: string | undefined; children?: ReactNode; + tabIndex?: number; } -export const EditorContentWrapper = ({ editor, editorContentCustomClassNames = "", children }: EditorContentProps) => ( -
- - {editor?.isActive("image") && editor?.isEditable && } - {children} -
-); +export const EditorContentWrapper: FC = (props) => { + const { editor, editorContentCustomClassNames = "", tabIndex, children } = props; + + return ( +
{ + editor?.chain().focus(undefined, { scrollIntoView: false }).run(); + }} + > + + {editor?.isActive("image") && editor?.isEditable && } + {children} +
+ ); +}; diff --git a/packages/editor/core/src/ui/extensions/custom-link/helpers/clickHandler.ts b/packages/editor/core/src/ui/extensions/custom-link/helpers/clickHandler.ts index 0854092a9e4..ec6c540dacc 100644 --- a/packages/editor/core/src/ui/extensions/custom-link/helpers/clickHandler.ts +++ b/packages/editor/core/src/ui/extensions/custom-link/helpers/clickHandler.ts @@ -15,9 +15,15 @@ export function clickHandler(options: ClickHandlerOptions): Plugin { return false; } - const eventTarget = event.target as HTMLElement; + let a = event.target as HTMLElement; + const els = []; - if (eventTarget.nodeName !== "A") { + while (a.nodeName !== "DIV") { + els.push(a); + a = a.parentNode as HTMLElement; + } + + if (!els.find((value) => value.nodeName === "A")) { return false; } @@ -28,9 +34,7 @@ export function clickHandler(options: ClickHandlerOptions): Plugin { const target = link?.target ?? attrs.target; if (link && href) { - if (view.editable) { - window.open(href, target); - } + window.open(href, target); return true; } diff --git a/packages/editor/core/src/ui/extensions/custom-link/helpers/pasteHandler.ts b/packages/editor/core/src/ui/extensions/custom-link/helpers/pasteHandler.ts index 83e38054c74..475bf28d94b 100644 --- a/packages/editor/core/src/ui/extensions/custom-link/helpers/pasteHandler.ts +++ b/packages/editor/core/src/ui/extensions/custom-link/helpers/pasteHandler.ts @@ -33,16 +33,8 @@ export function pasteHandler(options: PasteHandlerOptions): Plugin { return false; } - const html = event.clipboardData?.getData("text/html"); - - const hrefRegex = /href="([^"]*)"/; - - const existingLink = html?.match(hrefRegex); - - const url = existingLink ? 
existingLink[1] : link.href; - options.editor.commands.setMark(options.type, { - href: url, + href: link.href, }); return true; diff --git a/packages/editor/core/src/ui/extensions/custom-link/index.tsx b/packages/editor/core/src/ui/extensions/custom-link/index.ts similarity index 69% rename from packages/editor/core/src/ui/extensions/custom-link/index.tsx rename to packages/editor/core/src/ui/extensions/custom-link/index.ts index e66d18904f8..88e7abfe57c 100644 --- a/packages/editor/core/src/ui/extensions/custom-link/index.tsx +++ b/packages/editor/core/src/ui/extensions/custom-link/index.ts @@ -1,41 +1,76 @@ -import { Mark, markPasteRule, mergeAttributes } from "@tiptap/core"; +import { Mark, markPasteRule, mergeAttributes, PasteRuleMatch } from "@tiptap/core"; import { Plugin } from "@tiptap/pm/state"; import { find, registerCustomProtocol, reset } from "linkifyjs"; - -import { autolink } from "src/ui/extensions/custom-link/helpers/autolink"; -import { clickHandler } from "src/ui/extensions/custom-link/helpers/clickHandler"; -import { pasteHandler } from "src/ui/extensions/custom-link/helpers/pasteHandler"; +import { autolink } from "./helpers/autolink"; +import { clickHandler } from "./helpers/clickHandler"; +import { pasteHandler } from "./helpers/pasteHandler"; export interface LinkProtocolOptions { scheme: string; optionalSlashes?: boolean; } +export const pasteRegex = + /https?:\/\/(?:www\.)?[-a-zA-Z0-9@:%._+~#=]{1,256}\.[a-zA-Z]{2,}\b(?:[-a-zA-Z0-9@:%._+~#=?!&/]*)(?:[-a-zA-Z0-9@:%._+~#=?!&/]*)/gi; + export interface LinkOptions { + /** + * If enabled, it adds links as you type. + */ autolink: boolean; - inclusive: boolean; + /** + * An array of custom protocols to be registered with linkifyjs. + */ protocols: Array; + /** + * If enabled, links will be opened on click. + */ openOnClick: boolean; + /** + * If enabled, links will be inclusive i.e. if you move your cursor to the + * link text, and start typing, it'll be a part of the link itself. 
+ */ + inclusive: boolean; + /** + * Adds a link to the current selection if the pasted content only contains an url. + */ linkOnPaste: boolean; + /** + * A list of HTML attributes to be rendered. + */ HTMLAttributes: Record; + /** + * A validation function that modifies link verification for the auto linker. + * @param url - The url to be validated. + * @returns - True if the url is valid, false otherwise. + */ validate?: (url: string) => boolean; } declare module "@tiptap/core" { interface Commands { link: { + /** + * Set a link mark + */ setLink: (attributes: { href: string; target?: string | null; rel?: string | null; class?: string | null; }) => ReturnType; + /** + * Toggle a link mark + */ toggleLink: (attributes: { href: string; target?: string | null; rel?: string | null; class?: string | null; }) => ReturnType; + /** + * Unset a link mark + */ unsetLink: () => ReturnType; }; } @@ -150,37 +185,31 @@ export const CustomLinkExtension = Mark.create({ addPasteRules() { return [ markPasteRule({ - find: (text) => - find(text) - .filter((link) => { - if (this.options.validate) { - return this.options.validate(link.value); - } - return true; - }) - .filter((link) => link.isLink) - .map((link) => ({ - text: link.value, - index: link.start, - data: link, - })), - type: this.type, - getAttributes: (match, pasteEvent) => { - const html = pasteEvent?.clipboardData?.getData("text/html"); - const hrefRegex = /href="([^"]*)"/; - - const existingLink = html?.match(hrefRegex); - - if (existingLink) { - return { - href: existingLink[1], - }; + find: (text) => { + const foundLinks: PasteRuleMatch[] = []; + + if (text) { + const links = find(text).filter((item) => item.isLink); + + if (links.length) { + links.forEach((link) => + foundLinks.push({ + text: link.value, + data: { + href: link.href, + }, + index: link.start, + }) + ); + } } - return { - href: match.data?.href, - }; + return foundLinks; }, + type: this.type, + getAttributes: (match) => ({ + href: match.data?.href, + 
}), }), ]; }, diff --git a/packages/editor/core/src/ui/extensions/horizontal-rule/horizontal-rule.ts b/packages/editor/core/src/ui/extensions/horizontal-rule/horizontal-rule.ts new file mode 100644 index 00000000000..2af845b7a08 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/horizontal-rule/horizontal-rule.ts @@ -0,0 +1,111 @@ +import { isNodeSelection, mergeAttributes, Node, nodeInputRule } from "@tiptap/core"; +import { NodeSelection, TextSelection } from "@tiptap/pm/state"; + +export interface HorizontalRuleOptions { + HTMLAttributes: Record; +} + +declare module "@tiptap/core" { + interface Commands { + horizontalRule: { + /** + * Add a horizontal rule + */ + setHorizontalRule: () => ReturnType; + }; + } +} + +export const CustomHorizontalRule = Node.create({ + name: "horizontalRule", + + addOptions() { + return { + HTMLAttributes: {}, + }; + }, + + group: "block", + + parseHTML() { + return [{ tag: "hr" }]; + }, + + renderHTML({ HTMLAttributes }) { + return ["hr", mergeAttributes(this.options.HTMLAttributes, HTMLAttributes)]; + }, + + addCommands() { + return { + setHorizontalRule: + () => + ({ chain, state }) => { + const { selection } = state; + const { $from: $originFrom, $to: $originTo } = selection; + + const currentChain = chain(); + + if ($originFrom.parentOffset === 0) { + currentChain.insertContentAt( + { + from: Math.max($originFrom.pos - 1, 0), + to: $originTo.pos, + }, + { + type: this.name, + } + ); + } else if (isNodeSelection(selection)) { + currentChain.insertContentAt($originTo.pos, { + type: this.name, + }); + } else { + currentChain.insertContent({ type: this.name }); + } + + return ( + currentChain + // set cursor after horizontal rule + .command(({ tr, dispatch }) => { + if (dispatch) { + const { $to } = tr.selection; + const posAfter = $to.end(); + + if ($to.nodeAfter) { + if ($to.nodeAfter.isTextblock) { + tr.setSelection(TextSelection.create(tr.doc, $to.pos + 1)); + } else if ($to.nodeAfter.isBlock) { + 
tr.setSelection(NodeSelection.create(tr.doc, $to.pos)); + } else { + tr.setSelection(TextSelection.create(tr.doc, $to.pos)); + } + } else { + // add node after horizontal rule if it’s the end of the document + const node = $to.parent.type.contentMatch.defaultType?.create(); + + if (node) { + tr.insert(posAfter, node); + tr.setSelection(TextSelection.create(tr.doc, posAfter + 1)); + } + } + + tr.scrollIntoView(); + } + + return true; + }) + .run() + ); + }, + }; + }, + + addInputRules() { + return [ + nodeInputRule({ + find: /^(?:---|—-|___\s|\*\*\*\s)$/, + type: this.type, + }), + ]; + }, +}); diff --git a/packages/editor/core/src/ui/extensions/image/index.tsx b/packages/editor/core/src/ui/extensions/image/index.tsx index db8b1c97bb6..1431b77558a 100644 --- a/packages/editor/core/src/ui/extensions/image/index.tsx +++ b/packages/editor/core/src/ui/extensions/image/index.tsx @@ -5,6 +5,8 @@ import ImageExt from "@tiptap/extension-image"; import { onNodeDeleted, onNodeRestored } from "src/ui/plugins/delete-image"; import { DeleteImage } from "src/types/delete-image"; import { RestoreImage } from "src/types/restore-image"; +import { insertLineBelowImageAction } from "./utilities/insert-line-below-image"; +import { insertLineAboveImageAction } from "./utilities/insert-line-above-image"; interface ImageNode extends ProseMirrorNode { attrs: { @@ -18,6 +20,12 @@ const IMAGE_NODE_TYPE = "image"; export const ImageExtension = (deleteImage: DeleteImage, restoreFile: RestoreImage, cancelUploadImage?: () => any) => ImageExt.extend({ + addKeyboardShortcuts() { + return { + ArrowDown: insertLineBelowImageAction, + ArrowUp: insertLineAboveImageAction, + }; + }, addProseMirrorPlugins() { return [ UploadImagesPlugin(cancelUploadImage), diff --git a/packages/editor/core/src/ui/extensions/image/utilities/insert-line-above-image.ts b/packages/editor/core/src/ui/extensions/image/utilities/insert-line-above-image.ts new file mode 100644 index 00000000000..a18576b4627 --- /dev/null +++ 
b/packages/editor/core/src/ui/extensions/image/utilities/insert-line-above-image.ts @@ -0,0 +1,45 @@ +import { Node as ProseMirrorNode } from "@tiptap/pm/model"; +import { KeyboardShortcutCommand } from "@tiptap/core"; + +export const insertLineAboveImageAction: KeyboardShortcutCommand = ({ editor }) => { + const { selection, doc } = editor.state; + const { $from, $to } = selection; + + let imageNode: ProseMirrorNode | null = null; + let imagePos: number | null = null; + + // Check if the selection itself is an image node + doc.nodesBetween($from.pos, $to.pos, (node, pos) => { + if (node.type.name === "image") { + imageNode = node; + imagePos = pos; + return false; // Stop iterating once an image node is found + } + return true; + }); + + if (imageNode === null || imagePos === null) return false; + + // Since we want to insert above the image, we use the imagePos directly + const insertPos = imagePos; + + if (insertPos < 0) return false; + + // Check for an existing node immediately before the image + if (insertPos === 0) { + // If the previous node doesn't exist or isn't a paragraph, create and insert a new empty node there + editor.chain().insertContentAt(insertPos, { type: "paragraph" }).run(); + editor.chain().setTextSelection(insertPos).run(); + } else { + const prevNode = doc.nodeAt(insertPos); + + if (prevNode && prevNode.type.name === "paragraph") { + // If the previous node is a paragraph, move the cursor there + editor.chain().setTextSelection(insertPos).run(); + } else { + return false; + } + } + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/image/utilities/insert-line-below-image.ts b/packages/editor/core/src/ui/extensions/image/utilities/insert-line-below-image.ts new file mode 100644 index 00000000000..e998c728b93 --- /dev/null +++ b/packages/editor/core/src/ui/extensions/image/utilities/insert-line-below-image.ts @@ -0,0 +1,46 @@ +import { Node as ProseMirrorNode } from "@tiptap/pm/model"; +import { KeyboardShortcutCommand } 
from "@tiptap/core"; + +export const insertLineBelowImageAction: KeyboardShortcutCommand = ({ editor }) => { + const { selection, doc } = editor.state; + const { $from, $to } = selection; + + let imageNode: ProseMirrorNode | null = null; + let imagePos: number | null = null; + + // Check if the selection itself is an image node + doc.nodesBetween($from.pos, $to.pos, (node, pos) => { + if (node.type.name === "image") { + imageNode = node; + imagePos = pos; + return false; // Stop iterating once an image node is found + } + return true; + }); + + if (imageNode === null || imagePos === null) return false; + + const guaranteedImageNode: ProseMirrorNode = imageNode; + const nextNodePos = imagePos + guaranteedImageNode.nodeSize; + + // Check for an existing node immediately after the image + const nextNode = doc.nodeAt(nextNodePos); + + if (nextNode && nextNode.type.name === "paragraph") { + // If the next node is a paragraph, move the cursor there + const endOfParagraphPos = nextNodePos + nextNode.nodeSize - 1; + editor.chain().setTextSelection(endOfParagraphPos).run(); + } else if (!nextNode) { + // If the next node doesn't exist i.e. 
we're at the end of the document, create and insert a new empty node there + editor.chain().insertContentAt(nextNodePos, { type: "paragraph" }).run(); + editor + .chain() + .setTextSelection(nextNodePos + 1) + .run(); + } else { + // If the next node is not a paragraph, do not proceed + return false; + } + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/index.tsx b/packages/editor/core/src/ui/extensions/index.tsx index 190731fe0b6..1a932d6d51c 100644 --- a/packages/editor/core/src/ui/extensions/index.tsx +++ b/packages/editor/core/src/ui/extensions/index.tsx @@ -27,6 +27,7 @@ import { RestoreImage } from "src/types/restore-image"; import { CustomLinkExtension } from "src/ui/extensions/custom-link"; import { CustomCodeInlineExtension } from "src/ui/extensions/code-inline"; import { CustomTypographyExtension } from "src/ui/extensions/typography"; +import { CustomHorizontalRule } from "src/ui/extensions/horizontal-rule/horizontal-rule"; export const CoreEditorExtensions = ( mentionConfig: { @@ -55,9 +56,7 @@ export const CoreEditorExtensions = ( }, code: false, codeBlock: false, - horizontalRule: { - HTMLAttributes: { class: "mt-4 mb-4" }, - }, + horizontalRule: false, blockquote: false, dropcursor: { color: "rgba(var(--color-text-100))", @@ -67,6 +66,9 @@ export const CoreEditorExtensions = ( CustomQuoteExtension.configure({ HTMLAttributes: { className: "border-l-4 border-custom-border-300" }, }), + CustomHorizontalRule.configure({ + HTMLAttributes: { class: "mt-4 mb-4" }, + }), CustomKeymap, ListKeymap, CustomLinkExtension.configure({ diff --git a/packages/editor/core/src/ui/extensions/table/table/table-view.tsx b/packages/editor/core/src/ui/extensions/table/table/table-view.tsx index 674a8e1150a..2941179c7c5 100644 --- a/packages/editor/core/src/ui/extensions/table/table/table-view.tsx +++ b/packages/editor/core/src/ui/extensions/table/table/table-view.tsx @@ -213,10 +213,11 @@ function createToolbox({ { className: "colorPicker grid" }, 
Object.entries(colors).map(([colorName, colorValue]) => h("div", { - className: "colorPickerItem", + className: "colorPickerItem flex items-center justify-center", style: `background-color: ${colorValue.backgroundColor}; - color: ${colorValue.textColor || "inherit"};`, - innerHTML: colorValue?.icon || "", + color: ${colorValue.textColor || "inherit"};`, + innerHTML: + colorValue.icon ?? `A`, onClick: () => onSelectColor(colorValue), }) ) diff --git a/packages/editor/core/src/ui/extensions/table/table/table.ts b/packages/editor/core/src/ui/extensions/table/table/table.ts index ef595eee209..5fd06caf6af 100644 --- a/packages/editor/core/src/ui/extensions/table/table/table.ts +++ b/packages/editor/core/src/ui/extensions/table/table/table.ts @@ -25,6 +25,8 @@ import { tableControls } from "src/ui/extensions/table/table/table-controls"; import { TableView } from "src/ui/extensions/table/table/table-view"; import { createTable } from "src/ui/extensions/table/table/utilities/create-table"; import { deleteTableWhenAllCellsSelected } from "src/ui/extensions/table/table/utilities/delete-table-when-all-cells-selected"; +import { insertLineBelowTableAction } from "./utilities/insert-line-below-table-action"; +import { insertLineAboveTableAction } from "./utilities/insert-line-above-table-action"; export interface TableOptions { HTMLAttributes: Record; @@ -231,6 +233,8 @@ export const Table = Node.create({ "Mod-Backspace": deleteTableWhenAllCellsSelected, Delete: deleteTableWhenAllCellsSelected, "Mod-Delete": deleteTableWhenAllCellsSelected, + ArrowDown: insertLineBelowTableAction, + ArrowUp: insertLineAboveTableAction, }; }, diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/insert-line-above-table-action.ts b/packages/editor/core/src/ui/extensions/table/table/utilities/insert-line-above-table-action.ts new file mode 100644 index 00000000000..d61d21c5b39 --- /dev/null +++ 
b/packages/editor/core/src/ui/extensions/table/table/utilities/insert-line-above-table-action.ts @@ -0,0 +1,50 @@ +import { KeyboardShortcutCommand } from "@tiptap/core"; +import { findParentNodeOfType } from "src/lib/utils"; + +export const insertLineAboveTableAction: KeyboardShortcutCommand = ({ editor }) => { + // Check if the current selection or the closest node is a table + if (!editor.isActive("table")) return false; + + // Get the current selection + const { selection } = editor.state; + + // Find the table node and its position + const tableNode = findParentNodeOfType(selection, "table"); + if (!tableNode) return false; + + const tablePos = tableNode.pos; + + // Determine if the selection is in the first row of the table + const firstRow = tableNode.node.child(0); + const selectionPath = (selection.$anchor as any).path; + const selectionInFirstRow = selectionPath.includes(firstRow); + + if (!selectionInFirstRow) return false; + + // Check if the table is at the very start of the document or its parent node + if (tablePos === 0) { + // The table is at the start, so just insert a paragraph at the current position + editor.chain().insertContentAt(tablePos, { type: "paragraph" }).run(); + editor + .chain() + .setTextSelection(tablePos + 1) + .run(); + } else { + // The table is not at the start, check for the node immediately before the table + const prevNodePos = tablePos - 1; + + if (prevNodePos <= 0) return false; + + const prevNode = editor.state.doc.nodeAt(prevNodePos - 1); + + if (prevNode && prevNode.type.name === "paragraph") { + // If there's a paragraph before the table, move the cursor to the end of that paragraph + const endOfParagraphPos = tablePos - prevNode.nodeSize; + editor.chain().setTextSelection(endOfParagraphPos).run(); + } else { + return false; + } + } + + return true; +}; diff --git a/packages/editor/core/src/ui/extensions/table/table/utilities/insert-line-below-table-action.ts 
b/packages/editor/core/src/ui/extensions/table/table/utilities/insert-line-below-table-action.ts new file mode 100644 index 00000000000..28b46084aba --- /dev/null +++ b/packages/editor/core/src/ui/extensions/table/table/utilities/insert-line-below-table-action.ts @@ -0,0 +1,48 @@ +import { KeyboardShortcutCommand } from "@tiptap/core"; +import { findParentNodeOfType } from "src/lib/utils"; + +export const insertLineBelowTableAction: KeyboardShortcutCommand = ({ editor }) => { + // Check if the current selection or the closest node is a table + if (!editor.isActive("table")) return false; + + // Get the current selection + const { selection } = editor.state; + + // Find the table node and its position + const tableNode = findParentNodeOfType(selection, "table"); + if (!tableNode) return false; + + const tablePos = tableNode.pos; + const table = tableNode.node; + + // Determine if the selection is in the last row of the table + const rowCount = table.childCount; + const lastRow = table.child(rowCount - 1); + const selectionPath = (selection.$anchor as any).path; + const selectionInLastRow = selectionPath.includes(lastRow); + + if (!selectionInLastRow) return false; + + // Calculate the position immediately after the table + const nextNodePos = tablePos + table.nodeSize; + + // Check for an existing node immediately after the table + const nextNode = editor.state.doc.nodeAt(nextNodePos); + + if (nextNode && nextNode.type.name === "paragraph") { + // If the next node is an paragraph, move the cursor there + const endOfParagraphPos = nextNodePos + nextNode.nodeSize - 1; + editor.chain().setTextSelection(endOfParagraphPos).run(); + } else if (!nextNode) { + // If the next node doesn't exist i.e. 
we're at the end of the document, create and insert a new empty node there + editor.chain().insertContentAt(nextNodePos, { type: "paragraph" }).run(); + editor + .chain() + .setTextSelection(nextNodePos + 1) + .run(); + } else { + return false; + } + + return true; +}; diff --git a/packages/editor/core/src/ui/mentions/suggestion.ts b/packages/editor/core/src/ui/mentions/suggestion.ts index 40e75a1e381..3f1b8eeecce 100644 --- a/packages/editor/core/src/ui/mentions/suggestion.ts +++ b/packages/editor/core/src/ui/mentions/suggestion.ts @@ -22,7 +22,7 @@ export const Suggestion = (suggestions: IMentionSuggestion[]) => ({ // @ts-ignore popup = tippy("body", { getReferenceClientRect: props.clientRect, - appendTo: () => document.querySelector("#editor-container"), + appendTo: () => document.querySelector(".active-editor") ?? document.querySelector("#editor-container"), content: reactRenderer.element, showOnCreate: true, interactive: true, diff --git a/packages/editor/core/src/ui/read-only/extensions.tsx b/packages/editor/core/src/ui/read-only/extensions.tsx index cf7c4ee1823..93e1b388722 100644 --- a/packages/editor/core/src/ui/read-only/extensions.tsx +++ b/packages/editor/core/src/ui/read-only/extensions.tsx @@ -5,7 +5,6 @@ import { Color } from "@tiptap/extension-color"; import TaskItem from "@tiptap/extension-task-item"; import TaskList from "@tiptap/extension-task-list"; import { Markdown } from "tiptap-markdown"; -import Gapcursor from "@tiptap/extension-gapcursor"; import { TableHeader } from "src/ui/extensions/table/table-header/table-header"; import { Table } from "src/ui/extensions/table/table"; @@ -17,6 +16,11 @@ import { isValidHttpUrl } from "src/lib/utils"; import { Mentions } from "src/ui/mentions"; import { IMentionSuggestion } from "src/types/mention-suggestion"; import { CustomLinkExtension } from "src/ui/extensions/custom-link"; +import { CustomHorizontalRule } from "src/ui/extensions/horizontal-rule/horizontal-rule"; +import { CustomQuoteExtension } 
from "src/ui/extensions/quote"; +import { CustomTypographyExtension } from "src/ui/extensions/typography"; +import { CustomCodeBlockExtension } from "src/ui/extensions/code"; +import { CustomCodeInlineExtension } from "src/ui/extensions/code-inline"; export const CoreReadOnlyEditorExtensions = (mentionConfig: { mentionSuggestions: IMentionSuggestion[]; @@ -38,36 +42,31 @@ export const CoreReadOnlyEditorExtensions = (mentionConfig: { class: "leading-normal -mb-2", }, }, - blockquote: { - HTMLAttributes: { - class: "border-l-4 border-custom-border-300", - }, - }, - code: { - HTMLAttributes: { - class: "rounded-md bg-custom-primary-30 mx-1 px-1 py-1 font-mono font-medium text-custom-text-1000", - spellcheck: "false", - }, - }, + code: false, codeBlock: false, - horizontalRule: { - HTMLAttributes: { class: "mt-4 mb-4" }, - }, - dropcursor: { - color: "rgba(var(--color-text-100))", - width: 2, - }, + horizontalRule: false, + blockquote: false, + dropcursor: false, gapcursor: false, }), - Gapcursor, + CustomQuoteExtension.configure({ + HTMLAttributes: { className: "border-l-4 border-custom-border-300" }, + }), + CustomHorizontalRule.configure({ + HTMLAttributes: { class: "mt-4 mb-4" }, + }), CustomLinkExtension.configure({ + openOnClick: true, + autolink: true, + linkOnPaste: true, protocols: ["http", "https"], - validate: (url) => isValidHttpUrl(url), + validate: (url: string) => isValidHttpUrl(url), HTMLAttributes: { class: "text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer", }, }), + CustomTypographyExtension, ReadOnlyImageExtension.configure({ HTMLAttributes: { class: "rounded-lg border border-custom-border-300", @@ -87,6 +86,8 @@ export const CoreReadOnlyEditorExtensions = (mentionConfig: { }, nested: true, }), + CustomCodeBlockExtension, + CustomCodeInlineExtension, Markdown.configure({ html: true, transformCopiedText: true, diff --git a/packages/editor/document-editor/package.json 
b/packages/editor/document-editor/package.json index bd1f2d90fea..be00fcce445 100644 --- a/packages/editor/document-editor/package.json +++ b/packages/editor/document-editor/package.json @@ -1,6 +1,6 @@ { "name": "@plane/document-editor", - "version": "0.16.0", + "version": "0.17.0", "description": "Package that powers Plane's Pages Editor", "main": "./dist/index.mjs", "module": "./dist/index.mjs", @@ -37,7 +37,6 @@ "@tiptap/extension-placeholder": "^2.1.13", "@tiptap/pm": "^2.1.13", "@tiptap/suggestion": "^2.1.13", - "eslint-config-next": "13.2.4", "lucide-react": "^0.309.0", "react-popper": "^2.3.0", "tippy.js": "^6.3.7", @@ -47,7 +46,7 @@ "@types/node": "18.15.3", "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", - "eslint": "8.36.0", + "eslint-config-custom": "*", "postcss": "^8.4.29", "tailwind-config-custom": "*", "tsconfig": "*", diff --git a/packages/editor/document-editor/src/ui/components/content-browser.tsx b/packages/editor/document-editor/src/ui/components/content-browser.tsx index 97231ea966f..926d9a53deb 100644 --- a/packages/editor/document-editor/src/ui/components/content-browser.tsx +++ b/packages/editor/document-editor/src/ui/components/content-browser.tsx @@ -15,11 +15,11 @@ export const ContentBrowser = (props: ContentBrowserProps) => { const handleOnClick = (marking: IMarking) => { scrollSummary(editor, marking); if (setSidePeekVisible) setSidePeekVisible(false); - } + }; return (
-

Table of Contents

+

Outline

{markings.length !== 0 ? ( markings.map((marking) => diff --git a/packages/editor/document-editor/src/ui/components/editor-header.tsx b/packages/editor/document-editor/src/ui/components/editor-header.tsx index a322ddddc5b..33ac4a0dc66 100644 --- a/packages/editor/document-editor/src/ui/components/editor-header.tsx +++ b/packages/editor/document-editor/src/ui/components/editor-header.tsx @@ -7,6 +7,7 @@ import { AlertLabel } from "src/ui/components/alert-label"; import { IVerticalDropdownItemProps, VerticalDropdownMenu } from "src/ui/components/vertical-dropdown-menu"; import { SummaryPopover } from "src/ui/components/summary-popover"; import { InfoPopover } from "src/ui/components/info-popover"; +import { getDate } from "src/utils/date-utils"; interface IEditorHeader { editor: Editor; @@ -72,7 +73,7 @@ export const EditorHeader = (props: IEditorHeader) => { Icon={Archive} backgroundColor="bg-blue-500/20" textColor="text-blue-500" - label={`Archived at ${new Date(archivedAt).toLocaleString()}`} + label={`Archived at ${getDate(archivedAt)?.toLocaleString()}`} /> )} diff --git a/packages/editor/document-editor/src/ui/components/info-popover.tsx b/packages/editor/document-editor/src/ui/components/info-popover.tsx index f78dd347372..16a3452a668 100644 --- a/packages/editor/document-editor/src/ui/components/info-popover.tsx +++ b/packages/editor/document-editor/src/ui/components/info-popover.tsx @@ -3,13 +3,15 @@ import { usePopper } from "react-popper"; import { Calendar, History, Info } from "lucide-react"; // types import { DocumentDetails } from "src/types/editor-types"; +//utils +import { getDate } from "src/utils/date-utils"; type Props = { documentDetails: DocumentDetails; }; // function to render a Date in the format- 25 May 2023 at 2:53PM -const renderDate = (date: Date): string => { +const renderDate = (date: Date | undefined): string => { const options: Intl.DateTimeFormatOptions = { day: "numeric", month: "long", @@ -52,14 +54,14 @@ export const InfoPopover: 
React.FC = (props) => {
Last updated on
- {renderDate(new Date(documentDetails.last_updated_at))} + {renderDate(getDate(documentDetails?.last_updated_at))}
Created on
- {renderDate(new Date(documentDetails.created_on))} + {renderDate(getDate(documentDetails?.created_on))}
diff --git a/packages/editor/document-editor/src/ui/components/page-renderer.tsx b/packages/editor/document-editor/src/ui/components/page-renderer.tsx index 06b9e70ff2f..7c2717e807e 100644 --- a/packages/editor/document-editor/src/ui/components/page-renderer.tsx +++ b/packages/editor/document-editor/src/ui/components/page-renderer.tsx @@ -29,11 +29,13 @@ type IPageRenderer = { editorContentCustomClassNames?: string; hideDragHandle?: () => void; readonly: boolean; + tabIndex?: number; }; export const PageRenderer = (props: IPageRenderer) => { const { documentDetails, + tabIndex, editor, editorClassNames, editorContentCustomClassNames, @@ -152,7 +154,7 @@ export const PageRenderer = (props: IPageRenderer) => { ); return ( -
+
{!readonly ? ( handlePageTitleChange(e.target.value)} @@ -169,7 +171,11 @@ export const PageRenderer = (props: IPageRenderer) => { )}
- +
{isOpen && linkViewProps && coordinates && ( diff --git a/packages/editor/document-editor/src/ui/components/summary-popover.tsx b/packages/editor/document-editor/src/ui/components/summary-popover.tsx index 6ad7cad835d..41056c6ad26 100644 --- a/packages/editor/document-editor/src/ui/components/summary-popover.tsx +++ b/packages/editor/document-editor/src/ui/components/summary-popover.tsx @@ -33,8 +33,9 @@ export const SummaryPopover: React.FC = (props) => {
void; + tabIndex?: number; } interface DocumentReadOnlyEditorProps extends IDocumentReadOnlyEditor { @@ -51,6 +52,7 @@ const DocumentReadOnlyEditor = ({ pageArchiveConfig, rerenderOnPropsChange, onActionCompleteHandler, + tabIndex, }: DocumentReadOnlyEditorProps) => { const router = useRouter(); const [sidePeekVisible, setSidePeekVisible] = useState(true); @@ -108,9 +110,10 @@ const DocumentReadOnlyEditor = ({
Promise.resolve()} - readonly={true} + readonly editor={editor} editorClassNames={editorClassNames} documentDetails={documentDetails} diff --git a/packages/editor/document-editor/src/utils/date-utils.ts b/packages/editor/document-editor/src/utils/date-utils.ts new file mode 100644 index 00000000000..63c20a974f2 --- /dev/null +++ b/packages/editor/document-editor/src/utils/date-utils.ts @@ -0,0 +1,26 @@ +function isNumber(value: any) { + return typeof value === "number"; +} + +/** + * This method returns a date from string of type yyyy-mm-dd + * This method is recommended to use instead of new Date() as this does not introduce any timezone offsets + * @param date + * @returns date or undefined + */ +export const getDate = (date: string | Date | undefined | null): Date | undefined => { + try { + if (!date || date === "") return; + + if (typeof date !== "string" && !(date instanceof String)) return date; + const [yearString, monthString, dayString] = date.substring(0, 10).split("-"); + const year = parseInt(yearString); + const month = parseInt(monthString); + const day = parseInt(dayString); + if (!isNumber(year) || !isNumber(month) || !isNumber(day)) return; + + return new Date(year, month - 1, day); + } catch (e) { + return undefined; + } +}; diff --git a/packages/editor/extensions/package.json b/packages/editor/extensions/package.json index 0bdd70824eb..711ab8a969a 100644 --- a/packages/editor/extensions/package.json +++ b/packages/editor/extensions/package.json @@ -1,6 +1,6 @@ { "name": "@plane/editor-extensions", - "version": "0.16.0", + "version": "0.17.0", "description": "Package that powers Plane's Editor with extensions", "private": true, "main": "./dist/index.mjs", @@ -33,7 +33,6 @@ "@tiptap/pm": "^2.1.13", "@tiptap/react": "^2.1.13", "@tiptap/suggestion": "^2.1.13", - "eslint-config-next": "13.2.4", "lucide-react": "^0.294.0", "tippy.js": "^6.3.7" }, @@ -41,7 +40,7 @@ "@types/node": "18.15.3", "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", - 
"eslint": "8.36.0", + "eslint-config-custom": "*", "postcss": "^8.4.29", "tailwind-config-custom": "*", "tsconfig": "*", diff --git a/packages/editor/extensions/src/extensions/slash-commands.tsx b/packages/editor/extensions/src/extensions/slash-commands.tsx index 88e257cef6d..c52178b8186 100644 --- a/packages/editor/extensions/src/extensions/slash-commands.tsx +++ b/packages/editor/extensions/src/extensions/slash-commands.tsx @@ -85,7 +85,10 @@ const getSuggestionItems = searchTerms: ["p", "paragraph"], icon: , command: ({ editor, range }: CommandProps) => { - editor.chain().focus().deleteRange(range).toggleNode("paragraph", "paragraph").run(); + if (range) { + editor.chain().focus().deleteRange(range).clearNodes().run(); + } + editor.chain().focus().clearNodes().run(); }, }, { @@ -327,7 +330,7 @@ const renderItems = () => { // @ts-ignore popup = tippy("body", { getReferenceClientRect: props.clientRect, - appendTo: () => document.querySelector("#editor-container"), + appendTo: () => document.querySelector(".active-editor") ?? 
document.querySelector("#editor-container"), content: component.element, showOnCreate: true, interactive: true, diff --git a/packages/editor/lite-text-editor/package.json b/packages/editor/lite-text-editor/package.json index e033f620a6c..4ac62e3783b 100644 --- a/packages/editor/lite-text-editor/package.json +++ b/packages/editor/lite-text-editor/package.json @@ -1,6 +1,6 @@ { "name": "@plane/lite-text-editor", - "version": "0.16.0", + "version": "0.17.0", "description": "Package that powers Plane's Comment Editor", "private": true, "main": "./dist/index.mjs", @@ -36,10 +36,9 @@ "@types/node": "18.15.3", "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", - "eslint": "^7.32.0", + "eslint-config-custom": "*", "postcss": "^8.4.29", "tailwind-config-custom": "*", - "eslint-config-custom": "*", "tsconfig": "*", "tsup": "^7.2.0", "typescript": "4.9.5" diff --git a/packages/editor/lite-text-editor/src/ui/index.tsx b/packages/editor/lite-text-editor/src/ui/index.tsx index 57774ab5dc0..7986e0c6b3f 100644 --- a/packages/editor/lite-text-editor/src/ui/index.tsx +++ b/packages/editor/lite-text-editor/src/ui/index.tsx @@ -42,6 +42,7 @@ interface ILiteTextEditor { mentionHighlights?: string[]; mentionSuggestions?: IMentionSuggestion[]; submitButton?: React.ReactNode; + tabIndex?: number; } interface LiteTextEditorProps extends ILiteTextEditor { @@ -74,6 +75,7 @@ const LiteTextEditor = (props: LiteTextEditorProps) => { mentionHighlights, mentionSuggestions, submitButton, + tabIndex, } = props; const editor = useEditor({ @@ -103,7 +105,11 @@ const LiteTextEditor = (props: LiteTextEditorProps) => { return (
- +
{ const editor = useReadOnlyEditor({ value, @@ -45,7 +47,11 @@ const LiteReadOnlyEditor = ({ return (
- +
); diff --git a/packages/editor/rich-text-editor/package.json b/packages/editor/rich-text-editor/package.json index 0f3d0d8f70e..117cf5c41de 100644 --- a/packages/editor/rich-text-editor/package.json +++ b/packages/editor/rich-text-editor/package.json @@ -1,6 +1,6 @@ { "name": "@plane/rich-text-editor", - "version": "0.16.0", + "version": "0.17.0", "description": "Rich Text Editor that powers Plane", "private": true, "main": "./dist/index.mjs", @@ -39,7 +39,7 @@ "@types/node": "18.15.3", "@types/react": "^18.2.42", "@types/react-dom": "^18.2.17", - "eslint": "^7.32.0", + "eslint-config-custom": "*", "postcss": "^8.4.29", "react": "^18.2.0", "tailwind-config-custom": "*", diff --git a/packages/editor/rich-text-editor/src/ui/index.tsx b/packages/editor/rich-text-editor/src/ui/index.tsx index 4bcb340fd80..366fa471f0e 100644 --- a/packages/editor/rich-text-editor/src/ui/index.tsx +++ b/packages/editor/rich-text-editor/src/ui/index.tsx @@ -36,6 +36,7 @@ export type IRichTextEditor = { debouncedUpdatesEnabled?: boolean; mentionHighlights?: string[]; mentionSuggestions?: IMentionSuggestion[]; + tabIndex?: number; }; export interface RichTextEditorProps extends IRichTextEditor { @@ -45,6 +46,7 @@ export interface RichTextEditorProps extends IRichTextEditor { interface EditorHandle { clearEditor: () => void; setEditorValue: (content: string) => void; + setEditorValueAtCursorPosition: (content: string) => void; } const RichTextEditor = ({ @@ -67,6 +69,7 @@ const RichTextEditor = ({ mentionHighlights, rerenderOnPropsChange, mentionSuggestions, + tabIndex, }: RichTextEditorProps) => { const [hideDragHandleOnMouseLeave, setHideDragHandleOnMouseLeave] = React.useState<() => void>(() => {}); @@ -99,17 +102,21 @@ const RichTextEditor = ({ customClassName, }); - React.useEffect(() => { - if (editor && initialValue && editor.getHTML() != initialValue) editor.commands.setContent(initialValue); - }, [editor, initialValue]); - + // React.useEffect(() => { + // if (editor && 
initialValue && editor.getHTML() != initialValue) editor.commands.setContent(initialValue); + // }, [editor, initialValue]); + // if (!editor) return null; return ( {editor && }
- +
); diff --git a/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/index.tsx b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/index.tsx index 2e7dd25b844..2dbc86cec68 100644 --- a/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/index.tsx +++ b/packages/editor/rich-text-editor/src/ui/menus/bubble-menu/index.tsx @@ -25,16 +25,20 @@ type EditorBubbleMenuProps = Omit; export const EditorBubbleMenu: FC = (props: any) => { const items: BubbleMenuItem[] = [ - BoldItem(props.editor), - ItalicItem(props.editor), - UnderLineItem(props.editor), - StrikeThroughItem(props.editor), + ...(props.editor.isActive("code") + ? [] + : [ + BoldItem(props.editor), + ItalicItem(props.editor), + UnderLineItem(props.editor), + StrikeThroughItem(props.editor), + ]), CodeItem(props.editor), ]; const bubbleMenuProps: EditorBubbleMenuProps = { ...props, - shouldShow: ({ view, state, editor }) => { + shouldShow: ({ state, editor }) => { const { selection } = state; const { empty } = selection; @@ -64,6 +68,7 @@ export const EditorBubbleMenu: FC = (props: any) => { const [isLinkSelectorOpen, setIsLinkSelectorOpen] = useState(false); const [isSelecting, setIsSelecting] = useState(false); + useEffect(() => { function handleMouseDown() { function handleMouseMove() { @@ -108,20 +113,25 @@ export const EditorBubbleMenu: FC = (props: any) => { }} /> )} - { - setIsLinkSelectorOpen(!isLinkSelectorOpen); - setIsNodeSelectorOpen(false); - }} - /> + {!props.editor.isActive("code") && ( + { + setIsLinkSelectorOpen(!isLinkSelectorOpen); + setIsNodeSelectorOpen(false); + }} + /> + )}
{items.map((item) => ( + + +
+ tab.key === defaultOpen)} + > + + {TABS_LIST.map((tab) => ( + + cn("py-1 text-sm rounded border border-custom-border-200", { + "bg-custom-background-80": selected, + "hover:bg-custom-background-90 focus:bg-custom-background-90": !selected, + }) + } + > + {tab.title} + + ))} + + + + { + onChange({ + type: EmojiIconPickerTypes.EMOJI, + value: val, + }); + if (closeOnSelect) close(); + }} + height="20rem" + width="100%" + theme={theme} + searchPlaceholder={searchPlaceholder} + previewConfig={{ + showPreview: false, + }} + /> + + + { + onChange({ + type: EmojiIconPickerTypes.ICON, + value: val, + }); + if (closeOnSelect) close(); + }} + /> + + + +
+
+ + )} + + ); +}; diff --git a/packages/ui/src/emoji/icons-list.tsx b/packages/ui/src/emoji/icons-list.tsx new file mode 100644 index 00000000000..f55da881b47 --- /dev/null +++ b/packages/ui/src/emoji/icons-list.tsx @@ -0,0 +1,110 @@ +import React, { useEffect, useState } from "react"; +// components +import { Input } from "../form-fields"; +// helpers +import { cn } from "../../helpers"; +// constants +import { MATERIAL_ICONS_LIST } from "./icons"; + +type TIconsListProps = { + defaultColor: string; + onChange: (val: { name: string; color: string }) => void; +}; + +const DEFAULT_COLORS = ["#ff6b00", "#8cc1ff", "#fcbe1d", "#18904f", "#adf672", "#05c3ff", "#5f5f5f"]; + +export const IconsList: React.FC = (props) => { + const { defaultColor, onChange } = props; + // states + const [activeColor, setActiveColor] = useState(defaultColor); + const [showHexInput, setShowHexInput] = useState(false); + const [hexValue, setHexValue] = useState(""); + + useEffect(() => { + if (DEFAULT_COLORS.includes(defaultColor.toLowerCase())) setShowHexInput(false); + else { + setHexValue(defaultColor.slice(1, 7)); + setShowHexInput(true); + } + }, [defaultColor]); + + return ( + <> +
+ {showHexInput ? ( +
+ + HEX + # + { + const value = e.target.value; + setHexValue(value); + if (/^[0-9A-Fa-f]{6}$/.test(value)) setActiveColor(`#${value}`); + }} + className="flex-grow pl-0 text-xs text-custom-text-200" + mode="true-transparent" + autoFocus + /> +
+ ) : ( + DEFAULT_COLORS.map((curCol) => ( + + )) + )} + +
+
+ {MATERIAL_ICONS_LIST.map((icon) => ( + + ))} +
+ + ); +}; diff --git a/packages/ui/src/emoji/icons.ts b/packages/ui/src/emoji/icons.ts new file mode 100644 index 00000000000..72aacf18bb7 --- /dev/null +++ b/packages/ui/src/emoji/icons.ts @@ -0,0 +1,605 @@ +export const MATERIAL_ICONS_LIST = [ + { + name: "search", + }, + { + name: "home", + }, + { + name: "menu", + }, + { + name: "close", + }, + { + name: "settings", + }, + { + name: "done", + }, + { + name: "check_circle", + }, + { + name: "favorite", + }, + { + name: "add", + }, + { + name: "delete", + }, + { + name: "arrow_back", + }, + { + name: "star", + }, + { + name: "logout", + }, + { + name: "add_circle", + }, + { + name: "cancel", + }, + { + name: "arrow_drop_down", + }, + { + name: "more_vert", + }, + { + name: "check", + }, + { + name: "check_box", + }, + { + name: "toggle_on", + }, + { + name: "open_in_new", + }, + { + name: "refresh", + }, + { + name: "login", + }, + { + name: "radio_button_unchecked", + }, + { + name: "more_horiz", + }, + { + name: "apps", + }, + { + name: "radio_button_checked", + }, + { + name: "download", + }, + { + name: "remove", + }, + { + name: "toggle_off", + }, + { + name: "bolt", + }, + { + name: "arrow_upward", + }, + { + name: "filter_list", + }, + { + name: "delete_forever", + }, + { + name: "autorenew", + }, + { + name: "key", + }, + { + name: "sort", + }, + { + name: "sync", + }, + { + name: "add_box", + }, + { + name: "block", + }, + { + name: "restart_alt", + }, + { + name: "menu_open", + }, + { + name: "shopping_cart_checkout", + }, + { + name: "expand_circle_down", + }, + { + name: "backspace", + }, + { + name: "undo", + }, + { + name: "done_all", + }, + { + name: "do_not_disturb_on", + }, + { + name: "open_in_full", + }, + { + name: "double_arrow", + }, + { + name: "sync_alt", + }, + { + name: "zoom_in", + }, + { + name: "done_outline", + }, + { + name: "drag_indicator", + }, + { + name: "fullscreen", + }, + { + name: "star_half", + }, + { + name: "settings_accessibility", + }, + { + name: "reply", + }, + { + 
name: "exit_to_app", + }, + { + name: "unfold_more", + }, + { + name: "library_add", + }, + { + name: "cached", + }, + { + name: "select_check_box", + }, + { + name: "terminal", + }, + { + name: "change_circle", + }, + { + name: "disabled_by_default", + }, + { + name: "swap_horiz", + }, + { + name: "swap_vert", + }, + { + name: "app_registration", + }, + { + name: "download_for_offline", + }, + { + name: "close_fullscreen", + }, + { + name: "file_open", + }, + { + name: "minimize", + }, + { + name: "open_with", + }, + { + name: "dataset", + }, + { + name: "add_task", + }, + { + name: "start", + }, + { + name: "keyboard_voice", + }, + { + name: "create_new_folder", + }, + { + name: "forward", + }, + { + name: "download", + }, + { + name: "settings_applications", + }, + { + name: "compare_arrows", + }, + { + name: "redo", + }, + { + name: "zoom_out", + }, + { + name: "publish", + }, + { + name: "html", + }, + { + name: "token", + }, + { + name: "switch_access_shortcut", + }, + { + name: "fullscreen_exit", + }, + { + name: "sort_by_alpha", + }, + { + name: "delete_sweep", + }, + { + name: "indeterminate_check_box", + }, + { + name: "view_timeline", + }, + { + name: "settings_backup_restore", + }, + { + name: "arrow_drop_down_circle", + }, + { + name: "assistant_navigation", + }, + { + name: "sync_problem", + }, + { + name: "clear_all", + }, + { + name: "density_medium", + }, + { + name: "heart_plus", + }, + { + name: "filter_alt_off", + }, + { + name: "expand", + }, + { + name: "subdirectory_arrow_right", + }, + { + name: "download_done", + }, + { + name: "arrow_outward", + }, + { + name: "123", + }, + { + name: "swipe_left", + }, + { + name: "auto_mode", + }, + { + name: "saved_search", + }, + { + name: "place_item", + }, + { + name: "system_update_alt", + }, + { + name: "javascript", + }, + { + name: "search_off", + }, + { + name: "output", + }, + { + name: "select_all", + }, + { + name: "fit_screen", + }, + { + name: "swipe_up", + }, + { + name: "dynamic_form", + 
}, + { + name: "hide_source", + }, + { + name: "swipe_right", + }, + { + name: "switch_access_shortcut_add", + }, + { + name: "browse_gallery", + }, + { + name: "css", + }, + { + name: "density_small", + }, + { + name: "assistant_direction", + }, + { + name: "check_small", + }, + { + name: "youtube_searched_for", + }, + { + name: "move_up", + }, + { + name: "swap_horizontal_circle", + }, + { + name: "data_thresholding", + }, + { + name: "install_mobile", + }, + { + name: "move_down", + }, + { + name: "dataset_linked", + }, + { + name: "keyboard_command_key", + }, + { + name: "view_kanban", + }, + { + name: "swipe_down", + }, + { + name: "key_off", + }, + { + name: "transcribe", + }, + { + name: "send_time_extension", + }, + { + name: "swipe_down_alt", + }, + { + name: "swipe_left_alt", + }, + { + name: "swipe_right_alt", + }, + { + name: "swipe_up_alt", + }, + { + name: "keyboard_option_key", + }, + { + name: "cycle", + }, + { + name: "rebase", + }, + { + name: "rebase_edit", + }, + { + name: "empty_dashboard", + }, + { + name: "magic_exchange", + }, + { + name: "acute", + }, + { + name: "point_scan", + }, + { + name: "step_into", + }, + { + name: "cheer", + }, + { + name: "emoticon", + }, + { + name: "explosion", + }, + { + name: "water_bottle", + }, + { + name: "weather_hail", + }, + { + name: "syringe", + }, + { + name: "pill", + }, + { + name: "genetics", + }, + { + name: "allergy", + }, + { + name: "medical_mask", + }, + { + name: "body_fat", + }, + { + name: "barefoot", + }, + { + name: "infrared", + }, + { + name: "wrist", + }, + { + name: "metabolism", + }, + { + name: "conditions", + }, + { + name: "taunt", + }, + { + name: "altitude", + }, + { + name: "tibia", + }, + { + name: "footprint", + }, + { + name: "eyeglasses", + }, + { + name: "man_3", + }, + { + name: "woman_2", + }, + { + name: "rheumatology", + }, + { + name: "tornado", + }, + { + name: "landslide", + }, + { + name: "foggy", + }, + { + name: "severe_cold", + }, + { + name: "tsunami", + }, + { 
+ name: "vape_free", + }, + { + name: "sign_language", + }, + { + name: "emoji_symbols", + }, + { + name: "clear_night", + }, + { + name: "emoji_food_beverage", + }, + { + name: "hive", + }, + { + name: "thunderstorm", + }, + { + name: "communication", + }, + { + name: "rocket", + }, + { + name: "pets", + }, + { + name: "public", + }, + { + name: "quiz", + }, + { + name: "mood", + }, + { + name: "gavel", + }, + { + name: "eco", + }, + { + name: "diamond", + }, + { + name: "forest", + }, + { + name: "rainy", + }, + { + name: "skull", + }, +]; diff --git a/packages/ui/src/emoji/index.ts b/packages/ui/src/emoji/index.ts new file mode 100644 index 00000000000..97345413903 --- /dev/null +++ b/packages/ui/src/emoji/index.ts @@ -0,0 +1 @@ +export * from "./emoji-icon-picker"; diff --git a/packages/ui/src/form-fields/input.tsx b/packages/ui/src/form-fields/input.tsx index 6688d6778af..f73467621b6 100644 --- a/packages/ui/src/form-fields/input.tsx +++ b/packages/ui/src/form-fields/input.tsx @@ -1,4 +1,6 @@ import * as React from "react"; +// helpers +import { cn } from "../../helpers"; export interface InputProps extends React.InputHTMLAttributes { mode?: "primary" | "transparent" | "true-transparent"; @@ -16,17 +18,20 @@ const Input = React.forwardRef((props, ref) => { ref={ref} type={type} name={name} - className={`block rounded-md bg-transparent text-sm placeholder-custom-text-400 focus:outline-none ${ - mode === "primary" - ? "rounded-md border-[0.5px] border-custom-border-200" - : mode === "transparent" - ? "rounded border-none bg-transparent ring-0 transition-all focus:ring-1 focus:ring-custom-primary" - : mode === "true-transparent" - ? "rounded border-none bg-transparent ring-0" - : "" - } ${hasError ? "border-red-500" : ""} ${hasError && mode === "primary" ? "bg-red-500/20" : ""} ${ - inputSize === "sm" ? "px-3 py-2" : inputSize === "md" ? 
"p-3" : "" - } ${className}`} + className={cn( + `block rounded-md bg-transparent text-sm placeholder-custom-text-400 focus:outline-none ${ + mode === "primary" + ? "rounded-md border-[0.5px] border-custom-border-200" + : mode === "transparent" + ? "rounded border-none bg-transparent ring-0 transition-all focus:ring-1 focus:ring-custom-primary" + : mode === "true-transparent" + ? "rounded border-none bg-transparent ring-0" + : "" + } ${hasError ? "border-red-500" : ""} ${hasError && mode === "primary" ? "bg-red-500/20" : ""} ${ + inputSize === "sm" ? "px-3 py-2" : inputSize === "md" ? "p-3" : "" + }`, + className + )} {...rest} /> ); diff --git a/packages/ui/src/icons/cycle/circle-dot-full-icon.tsx b/packages/ui/src/icons/cycle/circle-dot-full-icon.tsx index 47c90e72b60..dd063e79ca5 100644 --- a/packages/ui/src/icons/cycle/circle-dot-full-icon.tsx +++ b/packages/ui/src/icons/cycle/circle-dot-full-icon.tsx @@ -4,7 +4,7 @@ import { ISvgIcons } from "../type"; export const CircleDotFullIcon: React.FC = ({ className = "text-current", ...rest }) => ( - + ); diff --git a/packages/ui/src/index.ts b/packages/ui/src/index.ts index b90b6993a77..24b76c3e0e4 100644 --- a/packages/ui/src/index.ts +++ b/packages/ui/src/index.ts @@ -2,6 +2,7 @@ export * from "./avatar"; export * from "./breadcrumbs"; export * from "./badge"; export * from "./button"; +export * from "./emoji"; export * from "./dropdowns"; export * from "./form-fields"; export * from "./icons"; @@ -10,3 +11,4 @@ export * from "./spinners"; export * from "./tooltip"; export * from "./loader"; export * from "./control-link"; +export * from "./toast"; diff --git a/packages/ui/src/spinners/circular-bar-spinner.tsx b/packages/ui/src/spinners/circular-bar-spinner.tsx new file mode 100644 index 00000000000..3be8af43aad --- /dev/null +++ b/packages/ui/src/spinners/circular-bar-spinner.tsx @@ -0,0 +1,35 @@ +import * as React from "react"; + +interface ICircularBarSpinner extends React.SVGAttributes { + height?: string; + 
width?: string; + className?: string | undefined; +} + +export const CircularBarSpinner: React.FC = ({ + height = "16px", + width = "16px", + className = "", +}) => ( +
+ + + + + + + + + + + + +
+); diff --git a/packages/ui/src/spinners/index.ts b/packages/ui/src/spinners/index.ts index 76856817261..a871a9b77b8 100644 --- a/packages/ui/src/spinners/index.ts +++ b/packages/ui/src/spinners/index.ts @@ -1 +1,2 @@ export * from "./circular-spinner"; +export * from "./circular-bar-spinner"; diff --git a/packages/ui/src/toast/index.tsx b/packages/ui/src/toast/index.tsx new file mode 100644 index 00000000000..f380505326a --- /dev/null +++ b/packages/ui/src/toast/index.tsx @@ -0,0 +1,210 @@ +import * as React from "react"; +import { Toaster, toast } from "sonner"; +// icons +import { AlertTriangle, CheckCircle2, X, XCircle } from "lucide-react"; +// spinner +import { CircularBarSpinner } from "../spinners"; +// helper +import { cn } from "../../helpers"; + +export enum TOAST_TYPE { + SUCCESS = "success", + ERROR = "error", + INFO = "info", + WARNING = "warning", + LOADING = "loading", +} + +type SetToastProps = + | { + type: TOAST_TYPE.LOADING; + title?: string; + } + | { + id?: string | number; + type: Exclude; + title: string; + message?: string; + }; + +type PromiseToastCallback = (data: ToastData) => string; + +type PromiseToastData = { + title: string; + message?: PromiseToastCallback; +}; + +type PromiseToastOptions = { + loading?: string; + success: PromiseToastData; + error: PromiseToastData; +}; + +type ToastContentProps = { + toastId: string | number; + icon?: React.ReactNode; + textColorClassName: string; + backgroundColorClassName: string; + borderColorClassName: string; +}; + +type ToastProps = { + theme: "light" | "dark" | "system"; +}; + +export const Toast = (props: ToastProps) => { + const { theme } = props; + return ; +}; + +export const setToast = (props: SetToastProps) => { + const renderToastContent = ({ + toastId, + icon, + textColorClassName, + backgroundColorClassName, + borderColorClassName, + }: ToastContentProps) => + props.type === TOAST_TYPE.LOADING ? ( +
{ + e.stopPropagation(); + e.preventDefault(); + }} + className={cn( + "w-[350px] h-[67.3px] rounded-lg border shadow-sm p-2", + backgroundColorClassName, + borderColorClassName + )} + > +
+ {icon &&
{icon}
} +
+
{props.title ?? "Loading..."}
+
+ toast.dismiss(toastId)} + /> +
+
+
+
+ ) : ( +
{ + e.stopPropagation(); + e.preventDefault(); + }} + className={cn( + "relative flex flex-col w-[350px] rounded-lg border shadow-sm p-2", + backgroundColorClassName, + borderColorClassName + )} + > + toast.dismiss(toastId)} + /> +
+ {icon &&
{icon}
} +
+
{props.title}
+ {props.message &&
{props.message}
} +
+
+
+ ); + + switch (props.type) { + case TOAST_TYPE.SUCCESS: + return toast.custom( + (toastId) => + renderToastContent({ + toastId, + icon: , + textColorClassName: "text-toast-text-success", + backgroundColorClassName: "bg-toast-background-success", + borderColorClassName: "border-toast-border-success", + }), + props.id ? { id: props.id } : {} + ); + case TOAST_TYPE.ERROR: + return toast.custom( + (toastId) => + renderToastContent({ + toastId, + icon: , + textColorClassName: "text-toast-text-error", + backgroundColorClassName: "bg-toast-background-error", + borderColorClassName: "border-toast-border-error", + }), + props.id ? { id: props.id } : {} + ); + case TOAST_TYPE.WARNING: + return toast.custom( + (toastId) => + renderToastContent({ + toastId, + icon: , + textColorClassName: "text-toast-text-warning", + backgroundColorClassName: "bg-toast-background-warning", + borderColorClassName: "border-toast-border-warning", + }), + props.id ? { id: props.id } : {} + ); + case TOAST_TYPE.INFO: + return toast.custom( + (toastId) => + renderToastContent({ + toastId, + textColorClassName: "text-toast-text-info", + backgroundColorClassName: "bg-toast-background-info", + borderColorClassName: "border-toast-border-info", + }), + props.id ? 
{ id: props.id } : {} + ); + + case TOAST_TYPE.LOADING: + return toast.custom((toastId) => + renderToastContent({ + toastId, + icon: , + textColorClassName: "text-toast-text-loading", + backgroundColorClassName: "bg-toast-background-loading", + borderColorClassName: "border-toast-border-loading", + }) + ); + } +}; + +export const setPromiseToast = ( + promise: Promise, + options: PromiseToastOptions +): void => { + const tId = setToast({ type: TOAST_TYPE.LOADING, title: options.loading }); + + promise + .then((data: ToastData) => { + setToast({ + type: TOAST_TYPE.SUCCESS, + id: tId, + title: options.success.title, + message: options.success.message?.(data), + }); + }) + .catch((data: ToastData) => { + setToast({ + type: TOAST_TYPE.ERROR, + id: tId, + title: options.error.title, + message: options.error.message?.(data), + }); + }); +}; diff --git a/packages/ui/src/tooltip/tooltip.tsx b/packages/ui/src/tooltip/tooltip.tsx index 65d014efe7b..ca176918679 100644 --- a/packages/ui/src/tooltip/tooltip.tsx +++ b/packages/ui/src/tooltip/tooltip.tsx @@ -29,6 +29,7 @@ interface ITooltipProps { className?: string; openDelay?: number; closeDelay?: number; + isMobile?: boolean; } export const Tooltip: React.FC = ({ @@ -40,6 +41,7 @@ export const Tooltip: React.FC = ({ className = "", openDelay = 200, closeDelay, + isMobile = false, }) => ( = ({ hoverCloseDelay={closeDelay} content={
{tooltipHeading &&
{tooltipHeading}
} {tooltipContent} diff --git a/space/.eslintrc.js b/space/.eslintrc.js index c8df607506c..57d39bcfad1 100644 --- a/space/.eslintrc.js +++ b/space/.eslintrc.js @@ -1,4 +1,52 @@ module.exports = { root: true, extends: ["custom"], + parser: "@typescript-eslint/parser", + settings: { + "import/resolver": { + typescript: {}, + node: { + moduleDirectory: ["node_modules", "."], + }, + }, + }, + rules: { + "import/order": [ + "error", + { + groups: ["builtin", "external", "internal", "parent", "sibling",], + pathGroups: [ + { + pattern: "react", + group: "external", + position: "before", + }, + { + pattern: "lucide-react", + group: "external", + position: "after", + }, + { + pattern: "@headlessui/**", + group: "external", + position: "after", + }, + { + pattern: "@plane/**", + group: "external", + position: "after", + }, + { + pattern: "@/**", + group: "internal", + } + ], + pathGroupsExcludedImportTypes: ["builtin", "internal", "react"], + alphabetize: { + order: "asc", + caseInsensitive: true, + }, + }, + ], + }, }; diff --git a/space/components/accounts/github-sign-in.tsx b/space/components/accounts/github-sign-in.tsx index 322011ae3c2..3b9b3f71b65 100644 --- a/space/components/accounts/github-sign-in.tsx +++ b/space/components/accounts/github-sign-in.tsx @@ -1,6 +1,6 @@ import { useEffect, useState, FC } from "react"; -import Link from "next/link"; import Image from "next/image"; +import Link from "next/link"; import { useRouter } from "next/router"; // next-themes import { useTheme } from "next-themes"; diff --git a/space/components/accounts/onboarding-form.tsx b/space/components/accounts/onboarding-form.tsx index 644f732f3bf..4ebb69b75e8 100644 --- a/space/components/accounts/onboarding-form.tsx +++ b/space/components/accounts/onboarding-form.tsx @@ -1,19 +1,19 @@ import { useEffect, Fragment } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { Controller, useForm } 
from "react-hook-form"; -import { Listbox, Transition } from "@headlessui/react"; import { Check, ChevronDown } from "lucide-react"; +import { Listbox, Transition } from "@headlessui/react"; // mobx store -import { useMobxStore } from "lib/mobx/store-provider"; +import { Button, Input } from "@plane/ui"; +import { USER_ROLES } from "@/constants/workspace"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // constants -import { USER_ROLES } from "constants/workspace"; // hooks +import { UserService } from "@/services/user.service"; import useToast from "hooks/use-toast"; // services -import { UserService } from "services/user.service"; // ui -import { Button, Input } from "@plane/ui"; const defaultValues = { first_name: "", diff --git a/space/components/accounts/sign-in-forms/create-password.tsx b/space/components/accounts/sign-in-forms/create-password.tsx index 55205e70757..ffcdfe7ba62 100644 --- a/space/components/accounts/sign-in-forms/create-password.tsx +++ b/space/components/accounts/sign-in-forms/create-password.tsx @@ -2,15 +2,15 @@ import React, { useEffect } from "react"; import Link from "next/link"; import { Controller, useForm } from "react-hook-form"; // services -import { AuthService } from "services/authentication.service"; +import { Button, Input } from "@plane/ui"; +import { ESignInSteps } from "@/components/accounts"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/authentication.service"; // hooks import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // constants -import { ESignInSteps } from "components/accounts"; type Props = { email: string; diff --git a/space/components/accounts/sign-in-forms/email-form.tsx b/space/components/accounts/sign-in-forms/email-form.tsx index 4f8ed429470..ca702320c42 100644 --- a/space/components/accounts/sign-in-forms/email-form.tsx +++ 
b/space/components/accounts/sign-in-forms/email-form.tsx @@ -2,17 +2,17 @@ import React, { useEffect } from "react"; import { Controller, useForm } from "react-hook-form"; import { XCircle } from "lucide-react"; // services -import { AuthService } from "services/authentication.service"; +import { Button, Input } from "@plane/ui"; +import { ESignInSteps } from "@/components/accounts"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/authentication.service"; // hooks import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types import { IEmailCheckData } from "types/auth"; // constants -import { ESignInSteps } from "components/accounts"; type Props = { handleStepChange: (step: ESignInSteps) => void; diff --git a/space/components/accounts/sign-in-forms/o-auth-options.tsx b/space/components/accounts/sign-in-forms/o-auth-options.tsx index 28da6fb7d6b..b546d9d3f95 100644 --- a/space/components/accounts/sign-in-forms/o-auth-options.tsx +++ b/space/components/accounts/sign-in-forms/o-auth-options.tsx @@ -1,13 +1,13 @@ +import { observer } from "mobx-react-lite"; import useSWR from "swr"; -import { observer } from "mobx-react-lite"; // services -import { AuthService } from "services/authentication.service"; -import { AppConfigService } from "services/app-config.service"; +import { GitHubSignInButton, GoogleSignInButton } from "@/components/accounts"; +import { AppConfigService } from "@/services/app-config.service"; +import { AuthService } from "@/services/authentication.service"; // hooks import useToast from "hooks/use-toast"; // components -import { GitHubSignInButton, GoogleSignInButton } from "components/accounts"; type Props = { handleSignInRedirection: () => Promise; diff --git a/space/components/accounts/sign-in-forms/optional-set-password.tsx 
b/space/components/accounts/sign-in-forms/optional-set-password.tsx index 2199717598a..082a238d92d 100644 --- a/space/components/accounts/sign-in-forms/optional-set-password.tsx +++ b/space/components/accounts/sign-in-forms/optional-set-password.tsx @@ -4,9 +4,9 @@ import { Controller, useForm } from "react-hook-form"; // ui import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; +import { ESignInSteps } from "@/components/accounts"; +import { checkEmailValidity } from "@/helpers/string.helper"; // constants -import { ESignInSteps } from "components/accounts"; type Props = { email: string; diff --git a/space/components/accounts/sign-in-forms/password.tsx b/space/components/accounts/sign-in-forms/password.tsx index f909f16c5d5..67863e010f4 100644 --- a/space/components/accounts/sign-in-forms/password.tsx +++ b/space/components/accounts/sign-in-forms/password.tsx @@ -3,17 +3,17 @@ import Link from "next/link"; import { Controller, useForm } from "react-hook-form"; import { XCircle } from "lucide-react"; // services -import { AuthService } from "services/authentication.service"; +import { Button, Input } from "@plane/ui"; +import { ESignInSteps } from "@/components/accounts"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/authentication.service"; // hooks import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types import { IPasswordSignInData } from "types/auth"; // constants -import { ESignInSteps } from "components/accounts"; type Props = { email: string; diff --git a/space/components/accounts/sign-in-forms/root.tsx b/space/components/accounts/sign-in-forms/root.tsx index c36842ce7a1..4f7859d7652 100644 --- a/space/components/accounts/sign-in-forms/root.tsx +++ b/space/components/accounts/sign-in-forms/root.tsx @@ -2,11 +2,6 @@ import React, { 
useState } from "react"; import { observer } from "mobx-react-lite"; import useSWR from "swr"; // hooks -import useSignInRedirection from "hooks/use-sign-in-redirection"; -// services -import { AppConfigService } from "services/app-config.service"; -// components -import { LatestFeatureBlock } from "components/common"; import { EmailForm, UniqueCodeForm, @@ -16,7 +11,12 @@ import { OptionalSetPasswordForm, CreatePasswordForm, SelfHostedSignInForm, -} from "components/accounts"; +} from "@/components/accounts"; +import { LatestFeatureBlock } from "@/components/common"; +import { AppConfigService } from "@/services/app-config.service"; +import useSignInRedirection from "hooks/use-sign-in-redirection"; +// services +// components export enum ESignInSteps { EMAIL = "EMAIL", diff --git a/space/components/accounts/sign-in-forms/self-hosted-sign-in.tsx b/space/components/accounts/sign-in-forms/self-hosted-sign-in.tsx index af1e5d68f6a..ddea6bf0075 100644 --- a/space/components/accounts/sign-in-forms/self-hosted-sign-in.tsx +++ b/space/components/accounts/sign-in-forms/self-hosted-sign-in.tsx @@ -3,13 +3,13 @@ import Link from "next/link"; import { Controller, useForm } from "react-hook-form"; import { XCircle } from "lucide-react"; // services -import { AuthService } from "services/authentication.service"; +import { Button, Input } from "@plane/ui"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/authentication.service"; // hooks import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types import { IPasswordSignInData } from "types/auth"; diff --git a/space/components/accounts/sign-in-forms/set-password-link.tsx b/space/components/accounts/sign-in-forms/set-password-link.tsx index 0b5ad21d9c6..4b2e41c78d5 100644 --- a/space/components/accounts/sign-in-forms/set-password-link.tsx +++ 
b/space/components/accounts/sign-in-forms/set-password-link.tsx @@ -1,13 +1,13 @@ import React from "react"; import { Controller, useForm } from "react-hook-form"; // services -import { AuthService } from "services/authentication.service"; +import { Button, Input } from "@plane/ui"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/authentication.service"; // hooks import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types import { IEmailCheckData } from "types/auth"; diff --git a/space/components/accounts/sign-in-forms/unique-code.tsx b/space/components/accounts/sign-in-forms/unique-code.tsx index 4c61fa1513f..f0dcbba474f 100644 --- a/space/components/accounts/sign-in-forms/unique-code.tsx +++ b/space/components/accounts/sign-in-forms/unique-code.tsx @@ -3,19 +3,19 @@ import Link from "next/link"; import { Controller, useForm } from "react-hook-form"; import { CornerDownLeft, XCircle } from "lucide-react"; // services -import { AuthService } from "services/authentication.service"; -import { UserService } from "services/user.service"; +import { Button, Input } from "@plane/ui"; +import { ESignInSteps } from "@/components/accounts"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/authentication.service"; +import { UserService } from "@/services/user.service"; // hooks -import useToast from "hooks/use-toast"; import useTimer from "hooks/use-timer"; +import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types import { IEmailCheckData, IMagicSignInData } from "types/auth"; // constants -import { ESignInSteps } from "components/accounts"; type Props = { email: string; diff --git a/space/components/accounts/user-logged-in.tsx 
b/space/components/accounts/user-logged-in.tsx index de0b18d51f6..f9f5aeb39a5 100644 --- a/space/components/accounts/user-logged-in.tsx +++ b/space/components/accounts/user-logged-in.tsx @@ -1,10 +1,10 @@ import Image from "next/image"; // mobx -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // assets -import UserLoggedInImage from "public/user-logged-in.svg"; import PlaneLogo from "public/plane-logos/black-horizontal-with-blue-logo.svg"; +import UserLoggedInImage from "public/user-logged-in.svg"; export const UserLoggedIn = () => { const { user: userStore } = useMobxStore(); diff --git a/space/components/common/index.ts b/space/components/common/index.ts index f1c0b088e85..36cc3c898b5 100644 --- a/space/components/common/index.ts +++ b/space/components/common/index.ts @@ -1 +1,2 @@ export * from "./latest-feature-block"; +export * from "./project-logo"; diff --git a/space/components/common/project-logo.tsx b/space/components/common/project-logo.tsx new file mode 100644 index 00000000000..9b69e96167d --- /dev/null +++ b/space/components/common/project-logo.tsx @@ -0,0 +1,34 @@ +// helpers +import { TProjectLogoProps } from "@plane/types"; +import { cn } from "@/helpers/common.helper"; +// types + +type Props = { + className?: string; + logo: TProjectLogoProps; +}; + +export const ProjectLogo: React.FC = (props) => { + const { className, logo } = props; + + if (logo.in_use === "icon" && logo.icon) + return ( + + {logo.icon.name} + + ); + + if (logo.in_use === "emoji" && logo.emoji) + return ( + + {logo.emoji.value?.split("-").map((emoji) => String.fromCodePoint(parseInt(emoji, 10)))} + + ); + + return ; +}; diff --git a/space/components/issues/board-views/block-due-date.tsx b/space/components/issues/board-views/block-due-date.tsx index 2920100a871..ecf2295621a 100644 --- a/space/components/issues/board-views/block-due-date.tsx +++ b/space/components/issues/board-views/block-due-date.tsx @@ -1,7 +1,7 
@@ "use client"; // helpers -import { renderFullDate } from "helpers/date-time.helper"; +import { renderFullDate } from "@/helpers/date-time.helper"; export const dueDateIconDetails = ( date: string, diff --git a/space/components/issues/board-views/block-priority.tsx b/space/components/issues/board-views/block-priority.tsx index 6b5e6ad9d6a..9bfa3808b05 100644 --- a/space/components/issues/board-views/block-priority.tsx +++ b/space/components/issues/board-views/block-priority.tsx @@ -1,9 +1,9 @@ "use client"; // types +import { issuePriorityFilter } from "@/constants/data"; import { TIssuePriorityKey } from "types/issue"; // constants -import { issuePriorityFilter } from "constants/data"; export const IssueBlockPriority = ({ priority }: { priority: TIssuePriorityKey | null }) => { const priority_detail = priority != null ? issuePriorityFilter(priority) : null; diff --git a/space/components/issues/board-views/block-state.tsx b/space/components/issues/board-views/block-state.tsx index 4afdbb99048..bc444445d01 100644 --- a/space/components/issues/board-views/block-state.tsx +++ b/space/components/issues/board-views/block-state.tsx @@ -1,7 +1,7 @@ // ui import { StateGroupIcon } from "@plane/ui"; // constants -import { issueGroupFilter } from "constants/data"; +import { issueGroupFilter } from "@/constants/data"; export const IssueBlockState = ({ state }: any) => { const stateGroup = issueGroupFilter(state.group); diff --git a/space/components/issues/board-views/kanban/block.tsx b/space/components/issues/board-views/kanban/block.tsx index 1a7eb9105df..1284f794925 100644 --- a/space/components/issues/board-views/kanban/block.tsx +++ b/space/components/issues/board-views/kanban/block.tsx @@ -2,16 +2,16 @@ // mobx react lite import { observer } from "mobx-react-lite"; -import { useMobxStore } from "lib/mobx/store-provider"; +import { useRouter } from "next/router"; +import { IssueBlockDueDate } from "@/components/issues/board-views/block-due-date"; +import { 
IssueBlockPriority } from "@/components/issues/board-views/block-priority"; +import { IssueBlockState } from "@/components/issues/board-views/block-state"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // components -import { IssueBlockPriority } from "components/issues/board-views/block-priority"; -import { IssueBlockState } from "components/issues/board-views/block-state"; -import { IssueBlockDueDate } from "components/issues/board-views/block-due-date"; // interfaces +import { RootStore } from "@/store/root"; import { IIssue } from "types/issue"; -import { RootStore } from "store/root"; -import { useRouter } from "next/router"; export const IssueKanBanBlock = observer(({ issue }: { issue: IIssue }) => { const { project: projectStore, issueDetails: issueDetailStore }: RootStore = useMobxStore(); diff --git a/space/components/issues/board-views/kanban/header.tsx b/space/components/issues/board-views/kanban/header.tsx index a3c623306be..ac15dc95c86 100644 --- a/space/components/issues/board-views/kanban/header.tsx +++ b/space/components/issues/board-views/kanban/header.tsx @@ -1,14 +1,14 @@ // mobx react lite import { observer } from "mobx-react-lite"; // interfaces -import { IIssueState } from "types/issue"; // constants -import { issueGroupFilter } from "constants/data"; -// ui import { StateGroupIcon } from "@plane/ui"; +import { issueGroupFilter } from "@/constants/data"; +// ui // mobx hook -import { useMobxStore } from "lib/mobx/store-provider"; -import { RootStore } from "store/root"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { RootStore } from "@/store/root"; +import { IIssueState } from "types/issue"; export const IssueKanBanHeader = observer(({ state }: { state: IIssueState }) => { const store: RootStore = useMobxStore(); diff --git a/space/components/issues/board-views/kanban/index.tsx b/space/components/issues/board-views/kanban/index.tsx index 381c340da30..a7f3fc6f6ff 100644 --- 
a/space/components/issues/board-views/kanban/index.tsx +++ b/space/components/issues/board-views/kanban/index.tsx @@ -3,15 +3,15 @@ // mobx react lite import { observer } from "mobx-react-lite"; // components -import { IssueKanBanHeader } from "components/issues/board-views/kanban/header"; -import { IssueKanBanBlock } from "components/issues/board-views/kanban/block"; +import { IssueKanBanBlock } from "@/components/issues/board-views/kanban/block"; +import { IssueKanBanHeader } from "@/components/issues/board-views/kanban/header"; // ui -import { Icon } from "components/ui"; +import { Icon } from "@/components/ui"; // interfaces -import { IIssueState, IIssue } from "types/issue"; // mobx hook -import { useMobxStore } from "lib/mobx/store-provider"; -import { RootStore } from "store/root"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { RootStore } from "@/store/root"; +import { IIssueState, IIssue } from "types/issue"; export const IssueKanbanView = observer(() => { const store: RootStore = useMobxStore(); diff --git a/space/components/issues/board-views/list/block.tsx b/space/components/issues/board-views/list/block.tsx index 039d731b5dd..4a980aba429 100644 --- a/space/components/issues/board-views/list/block.tsx +++ b/space/components/issues/board-views/list/block.tsx @@ -1,17 +1,17 @@ import { FC } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; // components -import { IssueBlockPriority } from "components/issues/board-views/block-priority"; -import { IssueBlockState } from "components/issues/board-views/block-state"; -import { IssueBlockLabels } from "components/issues/board-views/block-labels"; -import { IssueBlockDueDate } from "components/issues/board-views/block-due-date"; +import { IssueBlockDueDate } from "@/components/issues/board-views/block-due-date"; +import { IssueBlockLabels } from "@/components/issues/board-views/block-labels"; +import 
{ IssueBlockPriority } from "@/components/issues/board-views/block-priority"; +import { IssueBlockState } from "@/components/issues/board-views/block-state"; // mobx hook -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // interfaces +import { RootStore } from "@/store/root"; import { IIssue } from "types/issue"; // store -import { RootStore } from "store/root"; export const IssueListBlock: FC<{ issue: IIssue }> = observer((props) => { const { issue } = props; diff --git a/space/components/issues/board-views/list/header.tsx b/space/components/issues/board-views/list/header.tsx index 82b7ac26f7c..cc0a88f5dfe 100644 --- a/space/components/issues/board-views/list/header.tsx +++ b/space/components/issues/board-views/list/header.tsx @@ -1,14 +1,14 @@ // mobx react lite import { observer } from "mobx-react-lite"; // interfaces -import { IIssueState } from "types/issue"; // ui import { StateGroupIcon } from "@plane/ui"; // constants -import { issueGroupFilter } from "constants/data"; +import { issueGroupFilter } from "@/constants/data"; // mobx hook -import { useMobxStore } from "lib/mobx/store-provider"; -import { RootStore } from "store/root"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { RootStore } from "@/store/root"; +import { IIssueState } from "types/issue"; export const IssueListHeader = observer(({ state }: { state: IIssueState }) => { const store: RootStore = useMobxStore(); diff --git a/space/components/issues/board-views/list/index.tsx b/space/components/issues/board-views/list/index.tsx index 441935def34..65d79eaacb9 100644 --- a/space/components/issues/board-views/list/index.tsx +++ b/space/components/issues/board-views/list/index.tsx @@ -1,13 +1,13 @@ import { observer } from "mobx-react-lite"; // components -import { IssueListHeader } from "components/issues/board-views/list/header"; -import { IssueListBlock } from "components/issues/board-views/list/block"; +import 
{ IssueListBlock } from "@/components/issues/board-views/list/block"; +import { IssueListHeader } from "@/components/issues/board-views/list/header"; // interfaces -import { IIssueState, IIssue } from "types/issue"; // mobx hook -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // store -import { RootStore } from "store/root"; +import { RootStore } from "@/store/root"; +import { IIssueState, IIssue } from "types/issue"; export const IssueListView = observer(() => { const { issue: issueStore }: RootStore = useMobxStore(); diff --git a/space/components/issues/filters/applied-filters/filters-list.tsx b/space/components/issues/filters/applied-filters/filters-list.tsx index c971dbfe786..a036cfedff3 100644 --- a/space/components/issues/filters/applied-filters/filters-list.tsx +++ b/space/components/issues/filters/applied-filters/filters-list.tsx @@ -1,11 +1,11 @@ // components -import { AppliedPriorityFilters } from "./priority"; -import { AppliedStateFilters } from "./state"; // icons import { X } from "lucide-react"; // helpers -import { IIssueFilterOptions } from "store/issues/types"; +import { IIssueFilterOptions } from "@/store/issues/types"; import { IIssueLabel, IIssueState } from "types/issue"; +import { AppliedPriorityFilters } from "./priority"; +import { AppliedStateFilters } from "./state"; // types type Props = { diff --git a/space/components/issues/filters/applied-filters/priority.tsx b/space/components/issues/filters/applied-filters/priority.tsx index 79264ac7cf5..bbe72e4046e 100644 --- a/space/components/issues/filters/applied-filters/priority.tsx +++ b/space/components/issues/filters/applied-filters/priority.tsx @@ -1,5 +1,5 @@ -import { PriorityIcon } from "@plane/ui"; import { X } from "lucide-react"; +import { PriorityIcon } from "@plane/ui"; type Props = { handleRemove: (val: string) => void; diff --git a/space/components/issues/filters/applied-filters/root.tsx 
b/space/components/issues/filters/applied-filters/root.tsx index a7afaa67315..6adc7c261bf 100644 --- a/space/components/issues/filters/applied-filters/root.tsx +++ b/space/components/issues/filters/applied-filters/root.tsx @@ -1,12 +1,12 @@ import { FC, useCallback } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; // components -import { AppliedFiltersList } from "./filters-list"; // store -import { useMobxStore } from "lib/mobx/store-provider"; -import { RootStore } from "store/root"; -import { IIssueFilterOptions } from "store/issues/types"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { IIssueFilterOptions } from "@/store/issues/types"; +import { RootStore } from "@/store/root"; +import { AppliedFiltersList } from "./filters-list"; export const IssueAppliedFilters: FC = observer(() => { const router = useRouter(); diff --git a/space/components/issues/filters/helpers/dropdown.tsx b/space/components/issues/filters/helpers/dropdown.tsx index 809cd6f5aca..d98dee7ddbf 100644 --- a/space/components/issues/filters/helpers/dropdown.tsx +++ b/space/components/issues/filters/helpers/dropdown.tsx @@ -1,7 +1,7 @@ import React, { Fragment, useState } from "react"; +import { Placement } from "@popperjs/core"; import { usePopper } from "react-popper"; import { Popover, Transition } from "@headlessui/react"; -import { Placement } from "@popperjs/core"; // ui import { Button } from "@plane/ui"; diff --git a/space/components/issues/filters/labels.tsx b/space/components/issues/filters/labels.tsx index c8dd2132ba1..e5cb1269605 100644 --- a/space/components/issues/filters/labels.tsx +++ b/space/components/issues/filters/labels.tsx @@ -1,11 +1,11 @@ import React, { useState } from "react"; // components -import { FilterHeader, FilterOption } from "./helpers"; // ui import { Loader } from "@plane/ui"; // types import { IIssueLabel } from "types/issue"; +import { 
FilterHeader, FilterOption } from "./helpers"; const LabelIcons = ({ color }: { color: string }) => ( diff --git a/space/components/issues/filters/priority.tsx b/space/components/issues/filters/priority.tsx index 8a5a643f47a..98c35eea6e7 100644 --- a/space/components/issues/filters/priority.tsx +++ b/space/components/issues/filters/priority.tsx @@ -3,9 +3,9 @@ import { observer } from "mobx-react-lite"; // ui import { PriorityIcon } from "@plane/ui"; // components +import { issuePriorityFilters } from "@/constants/data"; import { FilterHeader, FilterOption } from "./helpers"; // constants -import { issuePriorityFilters } from "constants/data"; type Props = { appliedFilters: string[] | null; diff --git a/space/components/issues/filters/root.tsx b/space/components/issues/filters/root.tsx index b45dfffe308..489cf584bc7 100644 --- a/space/components/issues/filters/root.tsx +++ b/space/components/issues/filters/root.tsx @@ -1,16 +1,16 @@ import { FC, useCallback } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; // components +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { ISSUE_DISPLAY_FILTERS_BY_LAYOUT } from "@/store/issues/helpers"; +import { IIssueFilterOptions } from "@/store/issues/types"; +import { RootStore } from "@/store/root"; import { FiltersDropdown } from "./helpers/dropdown"; import { FilterSelection } from "./selection"; // types -import { IIssueFilterOptions } from "store/issues/types"; // helpers -import { ISSUE_DISPLAY_FILTERS_BY_LAYOUT } from "store/issues/helpers"; // store -import { RootStore } from "store/root"; -import { useMobxStore } from "lib/mobx/store-provider"; export const IssueFiltersDropdown: FC = observer(() => { const router = useRouter(); diff --git a/space/components/issues/filters/selection.tsx b/space/components/issues/filters/selection.tsx index b4491219036..71f0c5f1bb3 100644 --- 
a/space/components/issues/filters/selection.tsx +++ b/space/components/issues/filters/selection.tsx @@ -2,13 +2,13 @@ import React, { useState } from "react"; import { observer } from "mobx-react-lite"; import { Search, X } from "lucide-react"; // components -import { FilterPriority, FilterState } from "./"; // types // filter helpers -import { ILayoutDisplayFiltersOptions } from "store/issues/helpers"; -import { IIssueFilterOptions } from "store/issues/types"; +import { ILayoutDisplayFiltersOptions } from "@/store/issues/helpers"; +import { IIssueFilterOptions } from "@/store/issues/types"; import { IIssueState, IIssueLabel } from "types/issue"; +import { FilterPriority, FilterState } from "./"; type Props = { filters: IIssueFilterOptions; diff --git a/space/components/issues/filters/state.tsx b/space/components/issues/filters/state.tsx index a3781386d51..734abef55fb 100644 --- a/space/components/issues/filters/state.tsx +++ b/space/components/issues/filters/state.tsx @@ -1,10 +1,10 @@ import React, { useState } from "react"; // components -import { FilterHeader, FilterOption } from "./helpers"; // ui import { Loader, StateGroupIcon } from "@plane/ui"; // types import { IIssueState } from "types/issue"; +import { FilterHeader, FilterOption } from "./helpers"; type Props = { appliedFilters: string[] | null; diff --git a/space/components/issues/navbar/index.tsx b/space/components/issues/navbar/index.tsx index 0bc493b16f0..409c7b2865d 100644 --- a/space/components/issues/navbar/index.tsx +++ b/space/components/issues/navbar/index.tsx @@ -1,35 +1,20 @@ import { useEffect } from "react"; - +import { observer } from "mobx-react-lite"; import Link from "next/link"; import { useRouter } from "next/router"; - -// mobx -import { observer } from "mobx-react-lite"; // components -// import { NavbarSearch } from "./search"; -import { NavbarIssueBoardView } from "./issue-board-view"; -import { NavbarTheme } from "./theme"; -import { IssueFiltersDropdown } from 
"components/issues/filters"; -// ui -import { Avatar, Button } from "@plane/ui"; import { Briefcase } from "lucide-react"; +import { Avatar, Button } from "@plane/ui"; +import { ProjectLogo } from "@/components/common"; +import { IssueFiltersDropdown } from "@/components/issues/filters"; +// ui // lib -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // store -import { RootStore } from "store/root"; +import { RootStore } from "@/store/root"; import { TIssueBoardKeys } from "types/issue"; - -const renderEmoji = (emoji: string | { name: string; color: string }) => { - if (!emoji) return; - - if (typeof emoji === "object") - return ( - - {emoji.name} - - ); - else return isNaN(parseInt(emoji)) ? emoji : String.fromCodePoint(parseInt(emoji)); -}; +import { NavbarIssueBoardView } from "./issue-board-view"; +import { NavbarTheme } from "./theme"; const IssueNavbar = observer(() => { const { @@ -123,27 +108,15 @@ const IssueNavbar = observer(() => {
{/* project detail */}
-
- {projectStore.project ? ( - projectStore.project?.emoji ? ( - - {renderEmoji(projectStore.project.emoji)} - - ) : projectStore.project?.icon_prop ? ( -
- {renderEmoji(projectStore.project.icon_prop)} -
- ) : ( - - {projectStore.project?.name.charAt(0)} - - ) - ) : ( - - - - )} -
+ {projectStore.project ? ( + + + + ) : ( + + + + )}
{projectStore?.project?.name || `...`}
diff --git a/space/components/issues/navbar/issue-board-view.tsx b/space/components/issues/navbar/issue-board-view.tsx index 906d3543d95..f2d624974b3 100644 --- a/space/components/issues/navbar/issue-board-view.tsx +++ b/space/components/issues/navbar/issue-board-view.tsx @@ -1,10 +1,10 @@ -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; // constants -import { issueViews } from "constants/data"; +import { issueViews } from "@/constants/data"; // mobx -import { useMobxStore } from "lib/mobx/store-provider"; -import { RootStore } from "store/root"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { RootStore } from "@/store/root"; import { TIssueBoardKeys } from "types/issue"; export const NavbarIssueBoardView = observer(() => { diff --git a/space/components/issues/navbar/theme.tsx b/space/components/issues/navbar/theme.tsx index cd22654a2a0..1d45625c78c 100644 --- a/space/components/issues/navbar/theme.tsx +++ b/space/components/issues/navbar/theme.tsx @@ -1,9 +1,9 @@ // next theme +import { useEffect, useState } from "react"; +import { observer } from "mobx-react-lite"; import { useTheme } from "next-themes"; // mobx react lite -import { observer } from "mobx-react-lite"; -import { useEffect, useState } from "react"; export const NavbarTheme = observer(() => { const [appTheme, setAppTheme] = useState("light"); diff --git a/space/components/issues/peek-overview/comment/add-comment.tsx b/space/components/issues/peek-overview/comment/add-comment.tsx index ef1a115d282..6df72e2a87f 100644 --- a/space/components/issues/peek-overview/comment/add-comment.tsx +++ b/space/components/issues/peek-overview/comment/add-comment.tsx @@ -1,20 +1,20 @@ import React, { useRef } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { useForm, Controller } from "react-hook-form"; // lib -import { 
useMobxStore } from "lib/mobx/store-provider"; +import { LiteTextEditorWithRef } from "@plane/lite-text-editor"; +import { Button } from "@plane/ui"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // hooks +import fileService from "@/services/file.service"; +import { RootStore } from "@/store/root"; import useToast from "hooks/use-toast"; // ui -import { Button } from "@plane/ui"; // types import { Comment } from "types/issue"; // components -import { LiteTextEditorWithRef } from "@plane/lite-text-editor"; // service -import fileService from "services/file.service"; -import { RootStore } from "store/root"; const defaultValues: Partial = { comment_html: "", @@ -93,7 +93,7 @@ export const AddComment: React.FC = observer((props) => { customClassName="p-2" editorContentCustomClassNames="min-h-[35px]" debouncedUpdatesEnabled={false} - onChange={(comment_json: Object, comment_html: string) => { + onChange={(comment_json: unknown, comment_html: string) => { onChange(comment_html); }} submitButton={ diff --git a/space/components/issues/peek-overview/comment/comment-detail-card.tsx b/space/components/issues/peek-overview/comment/comment-detail-card.tsx index 7c6abe19956..3db64bbd1de 100644 --- a/space/components/issues/peek-overview/comment/comment-detail-card.tsx +++ b/space/components/issues/peek-overview/comment/comment-detail-card.tsx @@ -1,23 +1,23 @@ import React, { useState } from "react"; import { observer } from "mobx-react-lite"; import { Controller, useForm } from "react-hook-form"; -import { Menu, Transition } from "@headlessui/react"; import { Check, MessageSquare, MoreVertical, X } from "lucide-react"; +import { Menu, Transition } from "@headlessui/react"; // mobx store -import { useMobxStore } from "lib/mobx/store-provider"; // components import { LiteReadOnlyEditorWithRef, LiteTextEditorWithRef } from "@plane/lite-text-editor"; -import { CommentReactions } from "components/issues/peek-overview"; +import { CommentReactions } from 
"@/components/issues/peek-overview"; // helpers -import { timeAgo } from "helpers/date-time.helper"; +import { timeAgo } from "@/helpers/date-time.helper"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // types -import { Comment } from "types/issue"; // services -import fileService from "services/file.service"; +import fileService from "@/services/file.service"; +import { RootStore } from "@/store/root"; import useEditorSuggestions from "hooks/use-editor-suggestions"; -import { RootStore } from "store/root"; +import { Comment } from "types/issue"; type Props = { workspaceSlug: string; comment: Comment; @@ -115,7 +115,7 @@ export const CommentCard: React.FC = observer((props) => { value={value} debouncedUpdatesEnabled={false} customClassName="min-h-[50px] p-3 shadow-sm" - onChange={(comment_json: Object, comment_html: string) => { + onChange={(comment_json: unknown, comment_html: string) => { onChange(comment_html); }} /> diff --git a/space/components/issues/peek-overview/comment/comment-reactions.tsx b/space/components/issues/peek-overview/comment/comment-reactions.tsx index 3914c41fb08..2192746674c 100644 --- a/space/components/issues/peek-overview/comment/comment-reactions.tsx +++ b/space/components/issues/peek-overview/comment/comment-reactions.tsx @@ -1,15 +1,14 @@ import React from "react"; -import { useRouter } from "next/router"; - // mobx import { observer } from "mobx-react-lite"; -import { useMobxStore } from "lib/mobx/store-provider"; +import { useRouter } from "next/router"; // ui -import { ReactionSelector } from "components/ui"; import { Tooltip } from "@plane/ui"; +import { ReactionSelector } from "@/components/ui"; // helpers -import { groupReactions, renderEmoji } from "helpers/emoji.helper"; +import { groupReactions, renderEmoji } from "@/helpers/emoji.helper"; +import { useMobxStore } from "@/lib/mobx/store-provider"; type Props = { commentId: string; diff --git a/space/components/issues/peek-overview/full-screen-peek-view.tsx 
b/space/components/issues/peek-overview/full-screen-peek-view.tsx index e07620c05ee..32b850b128d 100644 --- a/space/components/issues/peek-overview/full-screen-peek-view.tsx +++ b/space/components/issues/peek-overview/full-screen-peek-view.tsx @@ -1,13 +1,13 @@ import { observer } from "mobx-react-lite"; // components +import { Loader } from "@plane/ui"; import { PeekOverviewHeader, PeekOverviewIssueActivity, PeekOverviewIssueDetails, PeekOverviewIssueProperties, -} from "components/issues/peek-overview"; +} from "@/components/issues/peek-overview"; // types -import { Loader } from "@plane/ui"; import { IIssue } from "types/issue"; type Props = { diff --git a/space/components/issues/peek-overview/header.tsx b/space/components/issues/peek-overview/header.tsx index 72a302b9099..0a3824f5d0a 100644 --- a/space/components/issues/peek-overview/header.tsx +++ b/space/components/issues/peek-overview/header.tsx @@ -1,18 +1,18 @@ import React from "react"; import { observer } from "mobx-react-lite"; -import { Listbox, Transition } from "@headlessui/react"; import { MoveRight } from "lucide-react"; +import { Listbox, Transition } from "@headlessui/react"; // hooks -import useToast from "hooks/use-toast"; // ui -import { Icon } from "components/ui"; +import { Icon } from "@/components/ui"; // helpers -import { copyTextToClipboard } from "helpers/string.helper"; +import { copyTextToClipboard } from "@/helpers/string.helper"; // store -import { IPeekMode } from "store/issue_details"; -import { RootStore } from "store/root"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { IPeekMode } from "@/store/issue_details"; +import { RootStore } from "@/store/root"; // lib -import { useMobxStore } from "lib/mobx/store-provider"; +import useToast from "hooks/use-toast"; // types import { IIssue } from "types/issue"; diff --git a/space/components/issues/peek-overview/issue-activity.tsx b/space/components/issues/peek-overview/issue-activity.tsx index 
cbb27679bcd..ad9f65aebbb 100644 --- a/space/components/issues/peek-overview/issue-activity.tsx +++ b/space/components/issues/peek-overview/issue-activity.tsx @@ -1,17 +1,17 @@ import React from "react"; +import { observer } from "mobx-react-lite"; import Link from "next/link"; import { useRouter } from "next/router"; // mobx -import { observer } from "mobx-react-lite"; // lib -import { useMobxStore } from "lib/mobx/store-provider"; +import { Button } from "@plane/ui"; +import { CommentCard, AddComment } from "@/components/issues/peek-overview"; +import { Icon } from "@/components/ui"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // components -import { CommentCard, AddComment } from "components/issues/peek-overview"; // ui -import { Icon } from "components/ui"; -import { Button } from "@plane/ui"; // types import { IIssue } from "types/issue"; diff --git a/space/components/issues/peek-overview/issue-details.tsx b/space/components/issues/peek-overview/issue-details.tsx index d84103388dc..dfc1be430fa 100644 --- a/space/components/issues/peek-overview/issue-details.tsx +++ b/space/components/issues/peek-overview/issue-details.tsx @@ -1,8 +1,8 @@ -import { IssueReactions } from "components/issues/peek-overview"; import { RichReadOnlyEditor } from "@plane/rich-text-editor"; +import { IssueReactions } from "@/components/issues/peek-overview"; // types -import { IIssue } from "types/issue"; import useEditorSuggestions from "hooks/use-editor-suggestions"; +import { IIssue } from "types/issue"; type Props = { issueDetails: IIssue; diff --git a/space/components/issues/peek-overview/issue-emoji-reactions.tsx b/space/components/issues/peek-overview/issue-emoji-reactions.tsx index 928d9115e09..016074ff97d 100644 --- a/space/components/issues/peek-overview/issue-emoji-reactions.tsx +++ b/space/components/issues/peek-overview/issue-emoji-reactions.tsx @@ -1,13 +1,13 @@ import { useEffect } from "react"; -import { useRouter } from "next/router"; import { observer } 
from "mobx-react-lite"; +import { useRouter } from "next/router"; // lib -import { useMobxStore } from "lib/mobx/store-provider"; +import { Tooltip } from "@plane/ui"; +import { ReactionSelector } from "@/components/ui"; +import { groupReactions, renderEmoji } from "@/helpers/emoji.helper"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // helpers -import { groupReactions, renderEmoji } from "helpers/emoji.helper"; // components -import { ReactionSelector } from "components/ui"; -import { Tooltip } from "@plane/ui"; export const IssueEmojiReactions: React.FC = observer(() => { // router diff --git a/space/components/issues/peek-overview/issue-properties.tsx b/space/components/issues/peek-overview/issue-properties.tsx index a6dcedf0804..1018c22f7cb 100644 --- a/space/components/issues/peek-overview/issue-properties.tsx +++ b/space/components/issues/peek-overview/issue-properties.tsx @@ -1,18 +1,18 @@ // hooks -import useToast from "hooks/use-toast"; // ui import { StateGroupIcon } from "@plane/ui"; // icons -import { Icon } from "components/ui"; +import { Icon } from "@/components/ui"; // helpers -import { copyTextToClipboard, addSpaceIfCamelCase } from "helpers/string.helper"; -import { renderFullDate } from "helpers/date-time.helper"; -import { dueDateIconDetails } from "../board-views/block-due-date"; +import { issueGroupFilter, issuePriorityFilter } from "@/constants/data"; +import { renderFullDate } from "@/helpers/date-time.helper"; +import { copyTextToClipboard, addSpaceIfCamelCase } from "@/helpers/string.helper"; // types -import { IIssue } from "types/issue"; -import { IPeekMode } from "store/issue_details"; +import { IPeekMode } from "@/store/issue_details"; // constants -import { issueGroupFilter, issuePriorityFilter } from "constants/data"; +import useToast from "hooks/use-toast"; +import { IIssue } from "types/issue"; +import { dueDateIconDetails } from "../board-views/block-due-date"; type Props = { issueDetails: IIssue; @@ -94,7 +94,7 @@ 
export const PeekOverviewIssueProperties: React.FC = ({ issueDetails, mod > {priority && ( - + )} {priority?.title ?? "None"} diff --git a/space/components/issues/peek-overview/issue-reaction.tsx b/space/components/issues/peek-overview/issue-reaction.tsx index 79b45f1048f..eaa5bb8d56f 100644 --- a/space/components/issues/peek-overview/issue-reaction.tsx +++ b/space/components/issues/peek-overview/issue-reaction.tsx @@ -1,5 +1,5 @@ -import { IssueEmojiReactions, IssueVotes } from "components/issues/peek-overview"; -import { useMobxStore } from "lib/mobx/store-provider"; +import { IssueEmojiReactions, IssueVotes } from "@/components/issues/peek-overview"; +import { useMobxStore } from "@/lib/mobx/store-provider"; export const IssueReactions: React.FC = () => { const { project: projectStore } = useMobxStore(); diff --git a/space/components/issues/peek-overview/issue-vote-reactions.tsx b/space/components/issues/peek-overview/issue-vote-reactions.tsx index 40ad145d057..80a2fde83e2 100644 --- a/space/components/issues/peek-overview/issue-vote-reactions.tsx +++ b/space/components/issues/peek-overview/issue-vote-reactions.tsx @@ -1,13 +1,13 @@ import { useState, useEffect } from "react"; +import { observer } from "mobx-react-lite"; import { useRouter } from "next/router"; // mobx -import { observer } from "mobx-react-lite"; // lib -import { useMobxStore } from "lib/mobx/store-provider"; -// ui import { Tooltip } from "@plane/ui"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +// ui export const IssueVotes: React.FC = observer(() => { const [isSubmitting, setIsSubmitting] = useState(false); diff --git a/space/components/issues/peek-overview/layout.tsx b/space/components/issues/peek-overview/layout.tsx index 5a4144db392..b76da13f439 100644 --- a/space/components/issues/peek-overview/layout.tsx +++ b/space/components/issues/peek-overview/layout.tsx @@ -1,19 +1,17 @@ import React, { useEffect, useState } from "react"; +import { observer } from "mobx-react-lite"; 
import { useRouter } from "next/router"; // mobx -import { observer } from "mobx-react-lite"; // headless ui import { Dialog, Transition } from "@headlessui/react"; // components -import { FullScreenPeekView, SidePeekView } from "components/issues/peek-overview"; +import { FullScreenPeekView, SidePeekView } from "@/components/issues/peek-overview"; // lib -import { useMobxStore } from "lib/mobx/store-provider"; - -type Props = {}; +import { useMobxStore } from "@/lib/mobx/store-provider"; -export const IssuePeekOverview: React.FC = observer(() => { +export const IssuePeekOverview: React.FC = observer(() => { // states const [isSidePeekOpen, setIsSidePeekOpen] = useState(false); const [isModalPeekOpen, setIsModalPeekOpen] = useState(false); diff --git a/space/components/issues/peek-overview/side-peek-view.tsx b/space/components/issues/peek-overview/side-peek-view.tsx index 0ec82a8efe2..8a8636edc15 100644 --- a/space/components/issues/peek-overview/side-peek-view.tsx +++ b/space/components/issues/peek-overview/side-peek-view.tsx @@ -1,13 +1,13 @@ import { observer } from "mobx-react-lite"; // components +import { Loader } from "@plane/ui"; import { PeekOverviewHeader, PeekOverviewIssueActivity, PeekOverviewIssueDetails, PeekOverviewIssueProperties, -} from "components/issues/peek-overview"; +} from "@/components/issues/peek-overview"; -import { Loader } from "@plane/ui"; import { IIssue } from "types/issue"; type Props = { diff --git a/space/components/ui/dropdown.tsx b/space/components/ui/dropdown.tsx index 09d27da42a4..788627094b7 100644 --- a/space/components/ui/dropdown.tsx +++ b/space/components/ui/dropdown.tsx @@ -1,9 +1,9 @@ import { Fragment, useState, useRef } from "react"; import Link from "next/link"; +import { Check, ChevronLeft } from "lucide-react"; import { Popover, Transition } from "@headlessui/react"; // hooks import useOutSideClick from "hooks/use-outside-click"; -import { Check, ChevronLeft } from "lucide-react"; type ItemOptionType = { display: 
React.ReactNode; @@ -67,13 +67,13 @@ const DropdownList: React.FC = (props) => { const DropdownItem: React.FC = (props) => { const { item } = props; - const { display, children, as: as_, href, onClick, isSelected } = item; + const { display, children, as: itemAs, href, onClick, isSelected } = item; const [open, setOpen] = useState(false); return (
- {(!as_ || as_ === "button" || as_ === "div") && ( + {(!itemAs || itemAs === "button" || itemAs === "div") && ( )} - {as_ === "link" && {display}} + {itemAs === "link" && {display}} {children && setOpen(false)} items={children} />}
diff --git a/space/components/ui/reaction-selector.tsx b/space/components/ui/reaction-selector.tsx index ae18d4883a4..9b999a618ee 100644 --- a/space/components/ui/reaction-selector.tsx +++ b/space/components/ui/reaction-selector.tsx @@ -4,10 +4,10 @@ import { Fragment } from "react"; import { Popover, Transition } from "@headlessui/react"; // helper -import { renderEmoji } from "helpers/emoji.helper"; +import { Icon } from "@/components/ui"; +import { renderEmoji } from "@/helpers/emoji.helper"; // icons -import { Icon } from "components/ui"; const reactionEmojis = ["128077", "128078", "128516", "128165", "128533", "129505", "9992", "128064"]; diff --git a/space/components/views/login.tsx b/space/components/views/login.tsx index 5ce24c8690f..e622f3e7812 100644 --- a/space/components/views/login.tsx +++ b/space/components/views/login.tsx @@ -1,11 +1,11 @@ +import { observer } from "mobx-react-lite"; import Image from "next/image"; // mobx -import { observer } from "mobx-react-lite"; -import { useMobxStore } from "lib/mobx/store-provider"; -// components -import { SignInRoot, UserLoggedIn } from "components/accounts"; import { Loader } from "@plane/ui"; +import { SignInRoot, UserLoggedIn } from "@/components/accounts"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +// components // images import BluePlaneLogoWithoutText from "public/plane-logos/blue-without-text-new.png"; diff --git a/space/components/views/project-details.tsx b/space/components/views/project-details.tsx index e3a0baaaaaa..914272a8bf1 100644 --- a/space/components/views/project-details.tsx +++ b/space/components/views/project-details.tsx @@ -1,18 +1,18 @@ import { useEffect } from "react"; +import { observer } from "mobx-react-lite"; import Image from "next/image"; import { useRouter } from "next/router"; -import { observer } from "mobx-react-lite"; // components -import { IssueListView } from "components/issues/board-views/list"; -import { IssueKanbanView } from 
"components/issues/board-views/kanban"; -import { IssueCalendarView } from "components/issues/board-views/calendar"; -import { IssueSpreadsheetView } from "components/issues/board-views/spreadsheet"; -import { IssueGanttView } from "components/issues/board-views/gantt"; -import { IssuePeekOverview } from "components/issues/peek-overview"; -import { IssueAppliedFilters } from "components/issues/filters/applied-filters/root"; +import { IssueCalendarView } from "@/components/issues/board-views/calendar"; +import { IssueGanttView } from "@/components/issues/board-views/gantt"; +import { IssueKanbanView } from "@/components/issues/board-views/kanban"; +import { IssueListView } from "@/components/issues/board-views/list"; +import { IssueSpreadsheetView } from "@/components/issues/board-views/spreadsheet"; +import { IssueAppliedFilters } from "@/components/issues/filters/applied-filters/root"; +import { IssuePeekOverview } from "@/components/issues/peek-overview"; // mobx store -import { RootStore } from "store/root"; -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { RootStore } from "@/store/root"; // assets import SomethingWentWrongImage from "public/something-went-wrong.svg"; diff --git a/space/contexts/toast.context.tsx b/space/contexts/toast.context.tsx index a382b4fd216..86608f02c96 100644 --- a/space/contexts/toast.context.tsx +++ b/space/contexts/toast.context.tsx @@ -2,7 +2,7 @@ import React, { createContext, useCallback, useReducer } from "react"; // uuid import { v4 as uuid } from "uuid"; // components -import ToastAlert from "components/ui/toast-alert"; +import ToastAlert from "@/components/ui/toast-alert"; export const toastContext = createContext({} as ContextType); diff --git a/space/hooks/use-editor-suggestions.tsx b/space/hooks/use-editor-suggestions.tsx index 0659121b7cd..81bf6875490 100644 --- a/space/hooks/use-editor-suggestions.tsx +++ b/space/hooks/use-editor-suggestions.tsx 
@@ -1,5 +1,5 @@ -import { useMobxStore } from "lib/mobx/store-provider"; -import { RootStore } from "store/root"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import { RootStore } from "@/store/root"; const useEditorSuggestions = () => { const { mentionsStore }: RootStore = useMobxStore(); diff --git a/space/hooks/use-sign-in-redirection.tsx b/space/hooks/use-sign-in-redirection.tsx index 306e7985ad6..7517c7dfc24 100644 --- a/space/hooks/use-sign-in-redirection.tsx +++ b/space/hooks/use-sign-in-redirection.tsx @@ -1,7 +1,7 @@ import { useCallback, useState } from "react"; import { useRouter } from "next/router"; // mobx store -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // types import { IUser } from "types/user"; diff --git a/space/hooks/use-toast.tsx b/space/hooks/use-toast.tsx index 6de3c104c67..13f598f88af 100644 --- a/space/hooks/use-toast.tsx +++ b/space/hooks/use-toast.tsx @@ -1,5 +1,5 @@ import { useContext } from "react"; -import { toastContext } from "contexts/toast.context"; +import { toastContext } from "@/contexts/toast.context"; const useToast = () => { const toastContextData = useContext(toastContext); diff --git a/space/layouts/project-layout.tsx b/space/layouts/project-layout.tsx index 3ae079982ae..c5946277f83 100644 --- a/space/layouts/project-layout.tsx +++ b/space/layouts/project-layout.tsx @@ -4,7 +4,7 @@ import Image from "next/image"; import { observer } from "mobx-react-lite"; import planeLogo from "public/plane-logo.svg"; // components -import IssueNavbar from "components/issues/navbar"; +import IssueNavbar from "@/components/issues/navbar"; const ProjectLayout = ({ children }: { children: React.ReactNode }) => (
diff --git a/space/lib/mobx/store-init.tsx b/space/lib/mobx/store-init.tsx index bcefd203b34..897c8e3a2a2 100644 --- a/space/lib/mobx/store-init.tsx +++ b/space/lib/mobx/store-init.tsx @@ -2,7 +2,7 @@ import { useEffect } from "react"; // js cookie import Cookie from "js-cookie"; // mobx store -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; const MobxStoreInit = () => { const { user: userStore } = useMobxStore(); diff --git a/space/lib/mobx/store-provider.tsx b/space/lib/mobx/store-provider.tsx index c6fde14ae02..18854c1d526 100644 --- a/space/lib/mobx/store-provider.tsx +++ b/space/lib/mobx/store-provider.tsx @@ -2,17 +2,17 @@ import { createContext, useContext } from "react"; // mobx store -import { RootStore } from "store/root"; +import { RootStore } from "@/store/root"; let rootStore: RootStore = new RootStore(); export const MobxStoreContext = createContext(rootStore); const initializeStore = () => { - const _rootStore: RootStore = rootStore ?? new RootStore(); - if (typeof window === "undefined") return _rootStore; - if (!rootStore) rootStore = _rootStore; - return _rootStore; + const singletonRootStore: RootStore = rootStore ?? 
new RootStore(); + if (typeof window === "undefined") return singletonRootStore; + if (!rootStore) rootStore = singletonRootStore; + return singletonRootStore; }; export const MobxStoreProvider = ({ children }: any) => { diff --git a/space/next.config.js b/space/next.config.js index 18b9275a1de..352d9b61b3f 100644 --- a/space/next.config.js +++ b/space/next.config.js @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ /** @type {import('next').NextConfig} */ require("dotenv").config({ path: ".env" }); const { withSentryConfig } = require("@sentry/nextjs"); @@ -26,8 +27,11 @@ const nextConfig = { output: "standalone", }; -if (parseInt(process.env.NEXT_PUBLIC_ENABLE_SENTRY || "0")) { - module.exports = withSentryConfig(nextConfig, { silent: true }, { hideSourceMaps: true }); +if (parseInt(process.env.NEXT_PUBLIC_ENABLE_SENTRY || "0", 10)) { + module.exports = withSentryConfig(nextConfig, + { silent: true, authToken: process.env.SENTRY_AUTH_TOKEN }, + { hideSourceMaps: true } + ); } else { module.exports = nextConfig; } diff --git a/space/package.json b/space/package.json index a1d600a60bf..6e736f9b072 100644 --- a/space/package.json +++ b/space/package.json @@ -1,6 +1,6 @@ { "name": "space", - "version": "0.16.0", + "version": "0.17.0", "private": true, "scripts": { "dev": "turbo run develop", @@ -20,8 +20,9 @@ "@plane/document-editor": "*", "@plane/lite-text-editor": "*", "@plane/rich-text-editor": "*", + "@plane/types": "*", "@plane/ui": "*", - "@sentry/nextjs": "^7.85.0", + "@sentry/nextjs": "^7.108.0", "axios": "^1.3.4", "clsx": "^2.0.0", "dotenv": "^16.3.1", @@ -49,9 +50,7 @@ "@types/react-dom": "^18.2.17", "@types/uuid": "^9.0.1", "@typescript-eslint/eslint-plugin": "^5.48.2", - "eslint": "8.34.0", "eslint-config-custom": "*", - "eslint-config-next": "13.2.1", "tailwind-config-custom": "*", "tsconfig": "*" } diff --git a/space/pages/[workspace_slug]/[project_slug]/index.tsx b/space/pages/[workspace_slug]/[project_slug]/index.tsx index 
e50c01c18e9..b0ee7d0cd17 100644 --- a/space/pages/[workspace_slug]/[project_slug]/index.tsx +++ b/space/pages/[workspace_slug]/[project_slug]/index.tsx @@ -4,11 +4,11 @@ import { useRouter } from "next/router"; import useSWR from "swr"; /// layouts -import ProjectLayout from "layouts/project-layout"; // components -import { ProjectDetailsView } from "components/views/project-details"; +import { ProjectDetailsView } from "@/components/views/project-details"; // lib -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; +import ProjectLayout from "layouts/project-layout"; const WorkspaceProjectPage = (props: any) => { const SITE_TITLE = props?.project_settings?.project_details?.name || "Plane | Deploy"; diff --git a/space/pages/_app.tsx b/space/pages/_app.tsx index da1db272c9d..9d80d53a372 100644 --- a/space/pages/_app.tsx +++ b/space/pages/_app.tsx @@ -1,15 +1,15 @@ -import Head from "next/head"; import type { AppProps } from "next/app"; +import Head from "next/head"; import { ThemeProvider } from "next-themes"; // styles -import "styles/globals.css"; +import "@/styles/globals.css"; // contexts -import { ToastContextProvider } from "contexts/toast.context"; +import { SITE_NAME, SITE_DESCRIPTION, SITE_URL, TWITTER_USER_NAME, SITE_KEYWORDS, SITE_TITLE } from "@/constants/seo"; +import { ToastContextProvider } from "@/contexts/toast.context"; // mobx store provider -import { MobxStoreProvider } from "lib/mobx/store-provider"; -import MobxStoreInit from "lib/mobx/store-init"; +import MobxStoreInit from "@/lib/mobx/store-init"; +import { MobxStoreProvider } from "@/lib/mobx/store-provider"; // constants -import { SITE_NAME, SITE_DESCRIPTION, SITE_URL, TWITTER_USER_NAME, SITE_KEYWORDS, SITE_TITLE } from "constants/seo"; const prefix = parseInt(process.env.NEXT_PUBLIC_DEPLOY_WITH_NGINX || "0") === 0 ? 
"/" : "/spaces/"; diff --git a/space/pages/accounts/password.tsx b/space/pages/accounts/password.tsx index 85da11290f2..16e805b6fba 100644 --- a/space/pages/accounts/password.tsx +++ b/space/pages/accounts/password.tsx @@ -3,20 +3,20 @@ import Image from "next/image"; import Link from "next/link"; import { useRouter } from "next/router"; import { useTheme } from "next-themes"; -import { Lightbulb } from "lucide-react"; import { Controller, useForm } from "react-hook-form"; +import { Lightbulb } from "lucide-react"; // services -import { AuthService } from "services/authentication.service"; +import { Button, Input } from "@plane/ui"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/authentication.service"; // hooks -import useToast from "hooks/use-toast"; import useSignInRedirection from "hooks/use-sign-in-redirection"; +import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; // images -import BluePlaneLogoWithoutText from "public/plane-logos/blue-without-text-new.png"; import latestFeatures from "public/onboarding/onboarding-pages.svg"; +import BluePlaneLogoWithoutText from "public/plane-logos/blue-without-text-new.png"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; type TResetPasswordFormValues = { email: string; diff --git a/space/pages/index.tsx b/space/pages/index.tsx index e680c257cac..b1c2bd97cf8 100644 --- a/space/pages/index.tsx +++ b/space/pages/index.tsx @@ -1,13 +1,13 @@ import { useEffect } from "react"; +import { observer } from "mobx-react-lite"; import { NextPage } from "next"; import { useRouter } from "next/router"; -import { observer } from "mobx-react-lite"; // components -import { LoginView } from "components/views"; +import { LoginView } from "@/components/views"; // store -import { RootStore } from "store/root"; -import { useMobxStore } from "lib/mobx/store-provider"; +import { useMobxStore } from "@/lib/mobx/store-provider"; 
+import { RootStore } from "@/store/root"; const Index: NextPage = observer(() => { const router = useRouter(); diff --git a/space/pages/onboarding/index.tsx b/space/pages/onboarding/index.tsx index 423eec2de7c..95acaef2940 100644 --- a/space/pages/onboarding/index.tsx +++ b/space/pages/onboarding/index.tsx @@ -1,9 +1,9 @@ import React, { useEffect } from "react"; // mobx import { observer } from "mobx-react-lite"; -import { useMobxStore } from "lib/mobx/store-provider"; +import { OnBoardingForm } from "@/components/accounts/onboarding-form"; +import { useMobxStore } from "@/lib/mobx/store-provider"; // components -import { OnBoardingForm } from "components/accounts/onboarding-form"; const imagePrefix = Boolean(parseInt(process.env.NEXT_PUBLIC_DEPLOY_WITH_NGINX || "0")) ? "/spaces" : ""; diff --git a/space/services/app-config.service.ts b/space/services/app-config.service.ts index af79935cf27..a6a1a9cf6f8 100644 --- a/space/services/app-config.service.ts +++ b/space/services/app-config.service.ts @@ -1,7 +1,7 @@ // services -import APIService from "services/api.service"; +import APIService from "@/services/api.service"; // helper -import { API_BASE_URL } from "helpers/common.helper"; +import { API_BASE_URL } from "@/helpers/common.helper"; // types import { IAppConfig } from "types/app"; diff --git a/space/services/authentication.service.ts b/space/services/authentication.service.ts index 7bf0eccfa0f..0fbf0c71b7d 100644 --- a/space/services/authentication.service.ts +++ b/space/services/authentication.service.ts @@ -1,6 +1,6 @@ // services -import APIService from "services/api.service"; -import { API_BASE_URL } from "helpers/common.helper"; +import APIService from "@/services/api.service"; +import { API_BASE_URL } from "@/helpers/common.helper"; import { IEmailCheckData, IEmailCheckResponse, ILoginTokenResponse, IPasswordSignInData } from "types/auth"; export class AuthService extends APIService { diff --git a/space/services/file.service.ts 
b/space/services/file.service.ts index ecebf92b7d3..52793ec75fc 100644 --- a/space/services/file.service.ts +++ b/space/services/file.service.ts @@ -1,7 +1,7 @@ // services -import APIService from "services/api.service"; +import APIService from "@/services/api.service"; // helpers -import { API_BASE_URL } from "helpers/common.helper"; +import { API_BASE_URL } from "@/helpers/common.helper"; import axios from "axios"; interface UnSplashImage { diff --git a/space/services/issue.service.ts b/space/services/issue.service.ts index 5feb1b00b92..b6f2e3be22b 100644 --- a/space/services/issue.service.ts +++ b/space/services/issue.service.ts @@ -1,6 +1,6 @@ // services -import APIService from "services/api.service"; -import { API_BASE_URL } from "helpers/common.helper"; +import APIService from "@/services/api.service"; +import { API_BASE_URL } from "@/helpers/common.helper"; class IssueService extends APIService { constructor() { diff --git a/space/services/project.service.ts b/space/services/project.service.ts index 0d6eca951b4..2e173d28282 100644 --- a/space/services/project.service.ts +++ b/space/services/project.service.ts @@ -1,6 +1,6 @@ // services -import APIService from "services/api.service"; -import { API_BASE_URL } from "helpers/common.helper"; +import APIService from "@/services/api.service"; +import { API_BASE_URL } from "@/helpers/common.helper"; class ProjectService extends APIService { constructor() { diff --git a/space/services/user.service.ts b/space/services/user.service.ts index c8232afa9b1..e49378d93cf 100644 --- a/space/services/user.service.ts +++ b/space/services/user.service.ts @@ -1,7 +1,7 @@ // services -import APIService from "services/api.service"; +import APIService from "@/services/api.service"; // helpers -import { API_BASE_URL } from "helpers/common.helper"; +import { API_BASE_URL } from "@/helpers/common.helper"; // types import { IUser } from "types/user"; diff --git a/space/store/issue.ts b/space/store/issue.ts index 
02dd3cdd00c..8e4876f6670 100644 --- a/space/store/issue.ts +++ b/space/store/issue.ts @@ -1,6 +1,6 @@ import { observable, action, computed, makeObservable, runInAction } from "mobx"; // services -import IssueService from "services/issue.service"; +import IssueService from "@/services/issue.service"; // store import { RootStore } from "./root"; // types diff --git a/space/store/issue_details.ts b/space/store/issue_details.ts index 346206e94ee..766cb979d8b 100644 --- a/space/store/issue_details.ts +++ b/space/store/issue_details.ts @@ -3,7 +3,7 @@ import { v4 as uuidv4 } from "uuid"; // store import { RootStore } from "./root"; // services -import IssueService from "services/issue.service"; +import IssueService from "@/services/issue.service"; import { IIssue, IVote } from "types/issue"; export type IPeekMode = "side" | "modal" | "full"; diff --git a/space/store/issues/base-issue-filter.store.ts b/space/store/issues/base-issue-filter.store.ts index 2cd2e3bc911..3859d94c665 100644 --- a/space/store/issues/base-issue-filter.store.ts +++ b/space/store/issues/base-issue-filter.store.ts @@ -1,5 +1,5 @@ // types -import { RootStore } from "store/root"; +import { RootStore } from "@/store/root"; export interface IIssueFilterBaseStore { // helper methods diff --git a/space/store/issues/issue-filters.store.ts b/space/store/issues/issue-filters.store.ts index f2408e290d0..fffa42e031c 100644 --- a/space/store/issues/issue-filters.store.ts +++ b/space/store/issues/issue-filters.store.ts @@ -1,6 +1,6 @@ import { action, makeObservable, observable, runInAction, computed } from "mobx"; // types -import { RootStore } from "store/root"; +import { RootStore } from "@/store/root"; import { IIssueFilterOptions, TIssueParams } from "./types"; import { handleIssueQueryParamsByLayout } from "./helpers"; import { IssueFilterBaseStore } from "./base-issue-filter.store"; diff --git a/space/store/project.ts b/space/store/project.ts index 76b4d06cb64..b12cf18dbfc 100644 --- 
a/space/store/project.ts +++ b/space/store/project.ts @@ -1,7 +1,7 @@ // mobx import { observable, action, makeObservable, runInAction } from "mobx"; // service -import ProjectService from "services/project.service"; +import ProjectService from "@/services/project.service"; import { TIssueBoardKeys } from "types/issue"; // types import { IWorkspace, IProject, IProjectSettings } from "types/project"; diff --git a/space/store/user.ts b/space/store/user.ts index e97f655f728..0e9b90106e9 100644 --- a/space/store/user.ts +++ b/space/store/user.ts @@ -1,7 +1,7 @@ // mobx import { observable, action, computed, makeObservable, runInAction } from "mobx"; // service -import { UserService } from "services/user.service"; +import { UserService } from "@/services/user.service"; // types import { IUser } from "types/user"; diff --git a/space/tsconfig.json b/space/tsconfig.json index 3047edb7c1a..9d3e164bed0 100644 --- a/space/tsconfig.json +++ b/space/tsconfig.json @@ -4,6 +4,9 @@ "exclude": ["node_modules"], "compilerOptions": { "baseUrl": ".", - "jsx": "preserve" + "jsx": "preserve", + "paths": { + "@/*": ["*"] + } } } diff --git a/space/types/project.ts b/space/types/project.ts index e0e1bba9ef2..7e81d366c01 100644 --- a/space/types/project.ts +++ b/space/types/project.ts @@ -1,3 +1,5 @@ +import { TProjectLogoProps } from "@plane/types"; + export interface IWorkspace { id: string; name: string; @@ -9,10 +11,8 @@ export interface IProject { identifier: string; name: string; description: string; - icon: string; cover_image: string | null; - icon_prop: string | null; - emoji: string | null; + logo_props: TProjectLogoProps; } export interface IProjectSettings { diff --git a/turbo.json b/turbo.json index bd5ee34b59e..4e8c4ee8151 100644 --- a/turbo.json +++ b/turbo.json @@ -16,37 +16,26 @@ "NEXT_PUBLIC_DEPLOY_WITH_NGINX", "NEXT_PUBLIC_POSTHOG_KEY", "NEXT_PUBLIC_POSTHOG_HOST", - "JITSU_TRACKER_ACCESS_KEY", - "JITSU_TRACKER_HOST" + "NEXT_PUBLIC_POSTHOG_DEBUG", + "SENTRY_AUTH_TOKEN" ], 
"pipeline": { "build": { - "dependsOn": [ - "^build" - ], - "outputs": [ - ".next/**", - "dist/**" - ] + "dependsOn": ["^build"], + "outputs": [".next/**", "dist/**"] }, "develop": { "cache": false, "persistent": true, - "dependsOn": [ - "^build" - ] + "dependsOn": ["^build"] }, "dev": { "cache": false, "persistent": true, - "dependsOn": [ - "^build" - ] + "dependsOn": ["^build"] }, "test": { - "dependsOn": [ - "^build" - ], + "dependsOn": ["^build"], "outputs": [] }, "lint": { diff --git a/web/.eslintrc.js b/web/.eslintrc.js index c8df607506c..57d39bcfad1 100644 --- a/web/.eslintrc.js +++ b/web/.eslintrc.js @@ -1,4 +1,52 @@ module.exports = { root: true, extends: ["custom"], + parser: "@typescript-eslint/parser", + settings: { + "import/resolver": { + typescript: {}, + node: { + moduleDirectory: ["node_modules", "."], + }, + }, + }, + rules: { + "import/order": [ + "error", + { + groups: ["builtin", "external", "internal", "parent", "sibling",], + pathGroups: [ + { + pattern: "react", + group: "external", + position: "before", + }, + { + pattern: "lucide-react", + group: "external", + position: "after", + }, + { + pattern: "@headlessui/**", + group: "external", + position: "after", + }, + { + pattern: "@plane/**", + group: "external", + position: "after", + }, + { + pattern: "@/**", + group: "internal", + } + ], + pathGroupsExcludedImportTypes: ["builtin", "internal", "react"], + alphabetize: { + order: "asc", + caseInsensitive: true, + }, + }, + ], + }, }; diff --git a/web/components/account/deactivate-account-modal.tsx b/web/components/account/deactivate-account-modal.tsx index 701db6ad945..41508ad67ad 100644 --- a/web/components/account/deactivate-account-modal.tsx +++ b/web/components/account/deactivate-account-modal.tsx @@ -1,15 +1,13 @@ import React, { useState } from "react"; import { useRouter } from "next/router"; import { useTheme } from "next-themes"; -import { Dialog, Transition } from "@headlessui/react"; -import { Trash2 } from "lucide-react"; import 
{ mutate } from "swr"; +import { Trash2 } from "lucide-react"; +import { Dialog, Transition } from "@headlessui/react"; // hooks -import { useUser } from "hooks/store"; // ui -import { Button } from "@plane/ui"; -// hooks -import useToast from "hooks/use-toast"; +import { Button, TOAST_TYPE, setToast } from "@plane/ui"; +import { useUser } from "@/hooks/store"; type Props = { isOpen: boolean; @@ -26,7 +24,6 @@ export const DeactivateAccountModal: React.FC = (props) => { const router = useRouter(); - const { setToastAlert } = useToast(); const { setTheme } = useTheme(); const handleClose = () => { @@ -39,8 +36,8 @@ export const DeactivateAccountModal: React.FC = (props) => { await deactivateAccount() .then(() => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Account deactivated successfully.", }); @@ -50,8 +47,8 @@ export const DeactivateAccountModal: React.FC = (props) => { handleClose(); }) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error, }) @@ -89,8 +86,11 @@ export const DeactivateAccountModal: React.FC = (props) => {
-
-
diff --git a/web/components/account/sign-in-forms/password.tsx b/web/components/account/sign-in-forms/password.tsx index 98719df63a1..8d7c9f89198 100644 --- a/web/components/account/sign-in-forms/password.tsx +++ b/web/components/account/sign-in-forms/password.tsx @@ -1,23 +1,22 @@ import React, { useState } from "react"; -import Link from "next/link"; import { observer } from "mobx-react-lite"; +import Link from "next/link"; import { Controller, useForm } from "react-hook-form"; import { Eye, EyeOff, XCircle } from "lucide-react"; +import { IPasswordSignInData } from "@plane/types"; // services -import { AuthService } from "services/auth.service"; +import { Button, Input, TOAST_TYPE, setToast } from "@plane/ui"; +import { ESignInSteps, ForgotPasswordPopover } from "@/components/account"; +import { FORGOT_PASSWORD, SIGN_IN_WITH_PASSWORD } from "@/constants/event-tracker"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { useApplication, useEventTracker } from "@/hooks/store"; +import { AuthService } from "@/services/auth.service"; // hooks -import useToast from "hooks/use-toast"; -import { useApplication, useEventTracker } from "hooks/store"; // components -import { ESignInSteps, ForgotPasswordPopover } from "components/account"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types -import { IPasswordSignInData } from "@plane/types"; // constants -import { FORGOT_PASSWORD, SIGN_IN_WITH_PASSWORD } from "constants/event-tracker"; type Props = { email: string; @@ -43,8 +42,6 @@ export const SignInPasswordForm: React.FC = observer((props) => { // states const [isSendingUniqueCode, setIsSendingUniqueCode] = useState(false); const [showPassword, setShowPassword] = useState(false); - // toast alert - const { setToastAlert } = useToast(); const { config: { envConfig }, } = useApplication(); @@ -83,8 +80,8 @@ export const SignInPasswordForm: React.FC = observer((props) => { 
await onSubmit(); }) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. Please try again.", }) @@ -107,8 +104,8 @@ export const SignInPasswordForm: React.FC = observer((props) => { .generateUniqueCode({ email: emailFormValue }) .then(() => handleStepChange(ESignInSteps.USE_UNIQUE_CODE_FROM_PASSWORD)) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. Please try again.", }) diff --git a/web/components/account/sign-in-forms/root.tsx b/web/components/account/sign-in-forms/root.tsx index 879937be5bd..626c4b08c24 100644 --- a/web/components/account/sign-in-forms/root.tsx +++ b/web/components/account/sign-in-forms/root.tsx @@ -1,20 +1,20 @@ import React, { useEffect, useState } from "react"; -import Link from "next/link"; import { observer } from "mobx-react-lite"; +import Link from "next/link"; // hooks -import { useApplication, useEventTracker } from "hooks/store"; -import useSignInRedirection from "hooks/use-sign-in-redirection"; -// components -import { LatestFeatureBlock } from "components/common"; import { SignInEmailForm, SignInUniqueCodeForm, SignInPasswordForm, OAuthOptions, SignInOptionalSetPasswordForm, -} from "components/account"; +} from "@/components/account"; +import { LatestFeatureBlock } from "@/components/common"; +import { NAVIGATE_TO_SIGNUP } from "@/constants/event-tracker"; +import { useApplication, useEventTracker } from "@/hooks/store"; +import useSignInRedirection from "@/hooks/use-sign-in-redirection"; +// components // constants -import { NAVIGATE_TO_SIGNUP } from "constants/event-tracker"; export enum ESignInSteps { EMAIL = "EMAIL", diff --git a/web/components/account/sign-in-forms/unique-code.tsx b/web/components/account/sign-in-forms/unique-code.tsx index 55dbe86e246..2a9144469a7 100644 --- 
a/web/components/account/sign-in-forms/unique-code.tsx +++ b/web/components/account/sign-in-forms/unique-code.tsx @@ -1,21 +1,22 @@ import React, { useState } from "react"; import { Controller, useForm } from "react-hook-form"; import { XCircle } from "lucide-react"; +import { IEmailCheckData, IMagicSignInData } from "@plane/types"; // services -import { AuthService } from "services/auth.service"; -import { UserService } from "services/user.service"; +import { Button, Input, TOAST_TYPE, setToast } from "@plane/ui"; + +import { CODE_VERIFIED } from "@/constants/event-tracker"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { useEventTracker } from "@/hooks/store"; + +import useTimer from "@/hooks/use-timer"; +import { AuthService } from "@/services/auth.service"; +import { UserService } from "@/services/user.service"; // hooks -import useToast from "hooks/use-toast"; -import useTimer from "hooks/use-timer"; -import { useEventTracker } from "hooks/store"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types -import { IEmailCheckData, IMagicSignInData } from "@plane/types"; // constants -import { CODE_VERIFIED } from "constants/event-tracker"; type Props = { email: string; @@ -42,8 +43,6 @@ export const SignInUniqueCodeForm: React.FC = (props) => { const { email, onSubmit, handleEmailClear, submitButtonText } = props; // states const [isRequestingNewCode, setIsRequestingNewCode] = useState(false); - // toast alert - const { setToastAlert } = useToast(); // store hooks const { captureEvent } = useEventTracker(); // timer @@ -84,8 +83,8 @@ export const SignInUniqueCodeForm: React.FC = (props) => { captureEvent(CODE_VERIFIED, { state: "FAILED", }); - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. 
Please try again.", }); @@ -101,8 +100,8 @@ export const SignInUniqueCodeForm: React.FC = (props) => { .generateUniqueCode(payload) .then(() => { setResendCodeTimer(30); - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "A new unique code has been sent to your email.", }); @@ -113,8 +112,8 @@ export const SignInUniqueCodeForm: React.FC = (props) => { }); }) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. Please try again.", }) @@ -204,8 +203,8 @@ export const SignInUniqueCodeForm: React.FC = (props) => { {resendTimerCode > 0 ? `Request new code in ${resendTimerCode}s` : isRequestingNewCode - ? "Requesting new code" - : "Request new code"} + ? "Requesting new code" + : "Request new code"}
diff --git a/web/components/account/sign-up-forms/email.tsx b/web/components/account/sign-up-forms/email.tsx index 0d5861b4ee2..bc4fb1d86d8 100644 --- a/web/components/account/sign-up-forms/email.tsx +++ b/web/components/account/sign-up-forms/email.tsx @@ -1,17 +1,15 @@ import React from "react"; +import { observer } from "mobx-react-lite"; import { Controller, useForm } from "react-hook-form"; import { XCircle } from "lucide-react"; -import { observer } from "mobx-react-lite"; +import { IEmailCheckData } from "@plane/types"; // services -import { AuthService } from "services/auth.service"; -// hooks -import useToast from "hooks/use-toast"; +import { Button, Input, TOAST_TYPE, setToast } from "@plane/ui"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/auth.service"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types -import { IEmailCheckData } from "@plane/types"; type Props = { onSubmit: () => void; @@ -27,7 +25,6 @@ const authService = new AuthService(); export const SignUpEmailForm: React.FC = observer((props) => { const { onSubmit, updateEmail } = props; // hooks - const { setToastAlert } = useToast(); const { control, formState: { errors, isSubmitting, isValid }, @@ -52,8 +49,8 @@ export const SignUpEmailForm: React.FC = observer((props) => { .emailCheck(payload) .then(() => onSubmit()) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. 
Please try again.", }) diff --git a/web/components/account/sign-up-forms/optional-set-password.tsx b/web/components/account/sign-up-forms/optional-set-password.tsx index b49adabbb5c..c269c389ac1 100644 --- a/web/components/account/sign-up-forms/optional-set-password.tsx +++ b/web/components/account/sign-up-forms/optional-set-password.tsx @@ -1,19 +1,19 @@ import React, { useState } from "react"; import { Controller, useForm } from "react-hook-form"; // services -import { AuthService } from "services/auth.service"; +import { Eye, EyeOff } from "lucide-react"; +import { Button, Input, TOAST_TYPE, setToast } from "@plane/ui"; +import { ESignUpSteps } from "@/components/account"; +import { PASSWORD_CREATE_SKIPPED, SETUP_PASSWORD } from "@/constants/event-tracker"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { useEventTracker } from "@/hooks/store"; +import { AuthService } from "@/services/auth.service"; // hooks -import useToast from "hooks/use-toast"; -import { useEventTracker } from "hooks/store"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; +// components // constants -import { ESignUpSteps } from "components/account"; -import { PASSWORD_CREATE_SELECTED, PASSWORD_CREATE_SKIPPED, SETUP_PASSWORD } from "constants/event-tracker"; // icons -import { Eye, EyeOff } from "lucide-react"; type Props = { email: string; @@ -41,8 +41,6 @@ export const SignUpOptionalSetPasswordForm: React.FC = (props) => { const [showPassword, setShowPassword] = useState(false); // store hooks const { captureEvent } = useEventTracker(); - // toast alert - const { setToastAlert } = useToast(); // form info const { control, @@ -65,8 +63,8 @@ export const SignUpOptionalSetPasswordForm: React.FC = (props) => { await authService .setPassword(payload) .then(async () => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Password created 
successfully.", }); @@ -81,8 +79,8 @@ export const SignUpOptionalSetPasswordForm: React.FC = (props) => { state: "FAILED", first_time: true, }); - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. Please try again.", }); @@ -164,7 +162,7 @@ export const SignUpOptionalSetPasswordForm: React.FC = (props) => {
)} /> -

+

This password will continue to be your account{"'"}s password.

diff --git a/web/components/account/sign-up-forms/password.tsx b/web/components/account/sign-up-forms/password.tsx index 293e03ef874..9cafabe812c 100644 --- a/web/components/account/sign-up-forms/password.tsx +++ b/web/components/account/sign-up-forms/password.tsx @@ -1,18 +1,16 @@ import React, { useState } from "react"; -import Link from "next/link"; import { observer } from "mobx-react-lite"; +import Link from "next/link"; import { Controller, useForm } from "react-hook-form"; import { Eye, EyeOff, XCircle } from "lucide-react"; +import { IPasswordSignInData } from "@plane/types"; // services -import { AuthService } from "services/auth.service"; -// hooks -import useToast from "hooks/use-toast"; // ui -import { Button, Input } from "@plane/ui"; +import { Button, Input, TOAST_TYPE, setToast } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { AuthService } from "@/services/auth.service"; // types -import { IPasswordSignInData } from "@plane/types"; type Props = { onSubmit: () => Promise; @@ -34,8 +32,6 @@ export const SignUpPasswordForm: React.FC = observer((props) => { const { onSubmit } = props; // states const [showPassword, setShowPassword] = useState(false); - // toast alert - const { setToastAlert } = useToast(); // form info const { control, @@ -59,8 +55,8 @@ export const SignUpPasswordForm: React.FC = observer((props) => { .passwordSignIn(payload) .then(async () => await onSubmit()) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. Please try again.", }) @@ -138,7 +134,7 @@ export const SignUpPasswordForm: React.FC = observer((props) => {
)} /> -

+

This password will continue to be your account{"'"}s password.

diff --git a/web/components/account/sign-up-forms/root.tsx b/web/components/account/sign-up-forms/root.tsx index 8eeb5e99f96..4fab3abcdd3 100644 --- a/web/components/account/sign-up-forms/root.tsx +++ b/web/components/account/sign-up-forms/root.tsx @@ -1,19 +1,19 @@ import React, { useEffect, useState } from "react"; import { observer } from "mobx-react-lite"; // hooks -import { useApplication, useEventTracker } from "hooks/store"; -import useSignInRedirection from "hooks/use-sign-in-redirection"; -// components +import Link from "next/link"; import { OAuthOptions, SignUpEmailForm, SignUpOptionalSetPasswordForm, SignUpPasswordForm, SignUpUniqueCodeForm, -} from "components/account"; -import Link from "next/link"; +} from "@/components/account"; +import { NAVIGATE_TO_SIGNIN } from "@/constants/event-tracker"; +import { useApplication, useEventTracker } from "@/hooks/store"; +import useSignInRedirection from "@/hooks/use-sign-in-redirection"; +// components // constants -import { NAVIGATE_TO_SIGNIN } from "constants/event-tracker"; export enum ESignUpSteps { EMAIL = "EMAIL", diff --git a/web/components/account/sign-up-forms/unique-code.tsx b/web/components/account/sign-up-forms/unique-code.tsx index 1b54ef9ebc1..bc6e1ee4c88 100644 --- a/web/components/account/sign-up-forms/unique-code.tsx +++ b/web/components/account/sign-up-forms/unique-code.tsx @@ -2,21 +2,21 @@ import React, { useState } from "react"; import Link from "next/link"; import { Controller, useForm } from "react-hook-form"; import { XCircle } from "lucide-react"; +import { IEmailCheckData, IMagicSignInData } from "@plane/types"; // services -import { AuthService } from "services/auth.service"; -import { UserService } from "services/user.service"; +import { Button, Input, TOAST_TYPE, setToast } from "@plane/ui"; + +import { CODE_VERIFIED } from "@/constants/event-tracker"; +import { checkEmailValidity } from "@/helpers/string.helper"; +import { useEventTracker } from "@/hooks/store"; +import useTimer 
from "@/hooks/use-timer"; +import { AuthService } from "@/services/auth.service"; +import { UserService } from "@/services/user.service"; // hooks -import useToast from "hooks/use-toast"; -import useTimer from "hooks/use-timer"; -import { useEventTracker } from "hooks/store"; // ui -import { Button, Input } from "@plane/ui"; // helpers -import { checkEmailValidity } from "helpers/string.helper"; // types -import { IEmailCheckData, IMagicSignInData } from "@plane/types"; // constants -import { CODE_VERIFIED } from "constants/event-tracker"; type Props = { email: string; @@ -44,8 +44,6 @@ export const SignUpUniqueCodeForm: React.FC = (props) => { const [isRequestingNewCode, setIsRequestingNewCode] = useState(false); // store hooks const { captureEvent } = useEventTracker(); - // toast alert - const { setToastAlert } = useToast(); // timer const { timer: resendTimerCode, setTimer: setResendCodeTimer } = useTimer(30); // form info @@ -84,8 +82,8 @@ export const SignUpUniqueCodeForm: React.FC = (props) => { captureEvent(CODE_VERIFIED, { state: "FAILED", }); - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. Please try again.", }); @@ -101,8 +99,8 @@ export const SignUpUniqueCodeForm: React.FC = (props) => { .generateUniqueCode(payload) .then(() => { setResendCodeTimer(30); - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "A new unique code has been sent to your email.", }); @@ -112,8 +110,8 @@ export const SignUpUniqueCodeForm: React.FC = (props) => { }); }) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. Please try again.", }) @@ -204,8 +202,8 @@ export const SignUpUniqueCodeForm: React.FC = (props) => { {resendTimerCode > 0 ? `Request new code in ${resendTimerCode}s` : isRequestingNewCode - ? 
"Requesting new code" - : "Request new code"} + ? "Requesting new code" + : "Request new code"}
diff --git a/web/components/analytics/custom-analytics/custom-analytics.tsx b/web/components/analytics/custom-analytics/custom-analytics.tsx index 0c3ec89250b..9f075a8bb5d 100644 --- a/web/components/analytics/custom-analytics/custom-analytics.tsx +++ b/web/components/analytics/custom-analytics/custom-analytics.tsx @@ -1,17 +1,17 @@ +import { observer } from "mobx-react-lite"; import { useRouter } from "next/router"; -import useSWR from "swr"; import { useForm } from "react-hook-form"; -import { observer } from "mobx-react-lite"; +import useSWR from "swr"; +import { IAnalyticsParams } from "@plane/types"; // services -import { AnalyticsService } from "services/analytics.service"; // components -import { CustomAnalyticsSelectBar, CustomAnalyticsMainContent, CustomAnalyticsSidebar } from "components/analytics"; +import { CustomAnalyticsSelectBar, CustomAnalyticsMainContent, CustomAnalyticsSidebar } from "@/components/analytics"; // types -import { IAnalyticsParams } from "@plane/types"; // fetch-keys -import { ANALYTICS } from "constants/fetch-keys"; -import { cn } from "helpers/common.helper"; -import { useApplication } from "hooks/store"; +import { ANALYTICS } from "@/constants/fetch-keys"; +import { cn } from "@/helpers/common.helper"; +import { useApplication } from "@/hooks/store"; +import { AnalyticsService } from "@/services/analytics.service"; type Props = { additionalParams?: Partial; diff --git a/web/components/analytics/custom-analytics/graph/custom-tooltip.tsx b/web/components/analytics/custom-analytics/graph/custom-tooltip.tsx index ec7c4019507..9101f1cf205 100644 --- a/web/components/analytics/custom-analytics/graph/custom-tooltip.tsx +++ b/web/components/analytics/custom-analytics/graph/custom-tooltip.tsx @@ -1,9 +1,9 @@ // nivo import { BarTooltipProps } from "@nivo/bar"; -import { DATE_KEYS } from "constants/analytics"; -import { renderMonthAndYear } from "helpers/analytics.helper"; -// types import { IAnalyticsParams, IAnalyticsResponse } from 
"@plane/types"; +import { DATE_KEYS } from "@/constants/analytics"; +import { renderMonthAndYear } from "@/helpers/analytics.helper"; +// types type Props = { datum: BarTooltipProps; @@ -60,8 +60,8 @@ export const CustomTooltip: React.FC = ({ datum, analytics, params }) => ? "capitalize" : "" : params.x_axis === "priority" || params.x_axis === "state__group" - ? "capitalize" - : "" + ? "capitalize" + : "" }`} > {params.segment === "assignees__id" ? renderAssigneeName(tooltipValue.toString()) : tooltipValue}: diff --git a/web/components/analytics/custom-analytics/graph/index.tsx b/web/components/analytics/custom-analytics/graph/index.tsx index 51b4089c4f2..742424c7925 100644 --- a/web/components/analytics/custom-analytics/graph/index.tsx +++ b/web/components/analytics/custom-analytics/graph/index.tsx @@ -1,15 +1,15 @@ // nivo import { BarDatum } from "@nivo/bar"; // components -import { CustomTooltip } from "./custom-tooltip"; +import { IAnalyticsParams, IAnalyticsResponse } from "@plane/types"; import { Tooltip } from "@plane/ui"; // ui -import { BarGraph } from "components/ui"; +import { BarGraph } from "@/components/ui"; // helpers -import { findStringWithMostCharacters } from "helpers/array.helper"; -import { generateBarColor, generateDisplayName } from "helpers/analytics.helper"; +import { generateBarColor, generateDisplayName } from "@/helpers/analytics.helper"; +import { findStringWithMostCharacters } from "@/helpers/array.helper"; // types -import { IAnalyticsParams, IAnalyticsResponse } from "@plane/types"; +import { CustomTooltip } from "./custom-tooltip"; type Props = { analytics: IAnalyticsResponse; @@ -101,8 +101,8 @@ export const AnalyticsGraph: React.FC = ({ analytics, barGraphData, param ? generateDisplayName(datum.value, analytics, params, "x_axis")[0].toUpperCase() : "?" : datum.value && datum.value !== "None" - ? `${datum.value}`.toUpperCase()[0] - : "?"} + ? 
`${datum.value}`.toUpperCase()[0] + : "?"} diff --git a/web/components/analytics/custom-analytics/main-content.tsx b/web/components/analytics/custom-analytics/main-content.tsx index 3c199f8078c..f57edbefddf 100644 --- a/web/components/analytics/custom-analytics/main-content.tsx +++ b/web/components/analytics/custom-analytics/main-content.tsx @@ -1,16 +1,16 @@ import { useRouter } from "next/router"; import { mutate } from "swr"; +import { IAnalyticsParams, IAnalyticsResponse } from "@plane/types"; // components -import { AnalyticsGraph, AnalyticsTable } from "components/analytics"; -// ui import { Button, Loader } from "@plane/ui"; +import { AnalyticsGraph, AnalyticsTable } from "@/components/analytics"; +// ui // helpers -import { convertResponseToBarGraphData } from "helpers/analytics.helper"; +import { ANALYTICS } from "@/constants/fetch-keys"; +import { convertResponseToBarGraphData } from "@/helpers/analytics.helper"; // types -import { IAnalyticsParams, IAnalyticsResponse } from "@plane/types"; // fetch-keys -import { ANALYTICS } from "constants/fetch-keys"; type Props = { analytics: IAnalyticsResponse | undefined; @@ -33,7 +33,7 @@ export const CustomAnalyticsMainContent: React.FC = (props) => { {!error ? ( analytics ? ( analytics.total > 0 ? ( -
+
; @@ -22,8 +22,9 @@ export const CustomAnalyticsSelectBar: React.FC = observer((props) => { return (
{!isProjectLevel && (
diff --git a/web/components/analytics/custom-analytics/select/project.tsx b/web/components/analytics/custom-analytics/select/project.tsx index 3c08e157473..325683904b7 100644 --- a/web/components/analytics/custom-analytics/select/project.tsx +++ b/web/components/analytics/custom-analytics/select/project.tsx @@ -1,8 +1,8 @@ import { observer } from "mobx-react-lite"; // hooks -import { useProject } from "hooks/store"; -// ui import { CustomSearchSelect } from "@plane/ui"; +import { useProject } from "@/hooks/store"; +// ui type Props = { value: string[] | undefined; diff --git a/web/components/analytics/custom-analytics/select/segment.tsx b/web/components/analytics/custom-analytics/select/segment.tsx index 055665d9ee2..07bbb0e3712 100644 --- a/web/components/analytics/custom-analytics/select/segment.tsx +++ b/web/components/analytics/custom-analytics/select/segment.tsx @@ -1,11 +1,11 @@ import { useRouter } from "next/router"; +import { IAnalyticsParams, TXAxisValues } from "@plane/types"; // ui import { CustomSelect } from "@plane/ui"; // types -import { IAnalyticsParams, TXAxisValues } from "@plane/types"; +import { ANALYTICS_X_AXIS_VALUES } from "@/constants/analytics"; // constants -import { ANALYTICS_X_AXIS_VALUES } from "constants/analytics"; type Props = { value: TXAxisValues | null | undefined; diff --git a/web/components/analytics/custom-analytics/select/x-axis.tsx b/web/components/analytics/custom-analytics/select/x-axis.tsx index 74ee99a7708..a0c21b1b674 100644 --- a/web/components/analytics/custom-analytics/select/x-axis.tsx +++ b/web/components/analytics/custom-analytics/select/x-axis.tsx @@ -1,11 +1,11 @@ import { useRouter } from "next/router"; +import { IAnalyticsParams, TXAxisValues } from "@plane/types"; // ui import { CustomSelect } from "@plane/ui"; // types -import { IAnalyticsParams, TXAxisValues } from "@plane/types"; +import { ANALYTICS_X_AXIS_VALUES } from "@/constants/analytics"; // constants -import { ANALYTICS_X_AXIS_VALUES } from 
"constants/analytics"; type Props = { value: TXAxisValues; diff --git a/web/components/analytics/custom-analytics/select/y-axis.tsx b/web/components/analytics/custom-analytics/select/y-axis.tsx index 9f66c6b5450..a33feb96793 100644 --- a/web/components/analytics/custom-analytics/select/y-axis.tsx +++ b/web/components/analytics/custom-analytics/select/y-axis.tsx @@ -1,9 +1,9 @@ // ui +import { TYAxisValues } from "@plane/types"; import { CustomSelect } from "@plane/ui"; // types -import { TYAxisValues } from "@plane/types"; +import { ANALYTICS_Y_AXIS_VALUES } from "@/constants/analytics"; // constants -import { ANALYTICS_Y_AXIS_VALUES } from "constants/analytics"; type Props = { value: TYAxisValues; diff --git a/web/components/analytics/custom-analytics/sidebar/projects-list.tsx b/web/components/analytics/custom-analytics/sidebar/projects-list.tsx index f7ba07b75c4..7b665e5d817 100644 --- a/web/components/analytics/custom-analytics/sidebar/projects-list.tsx +++ b/web/components/analytics/custom-analytics/sidebar/projects-list.tsx @@ -1,11 +1,11 @@ import { observer } from "mobx-react-lite"; // hooks -import { useProject } from "hooks/store"; // icons import { Contrast, LayoutGrid, Users } from "lucide-react"; // helpers -import { renderEmoji } from "helpers/emoji.helper"; -import { truncateText } from "helpers/string.helper"; +import { ProjectLogo } from "@/components/project"; +import { truncateText } from "@/helpers/string.helper"; +import { useProject } from "@/hooks/store"; type Props = { projectIds: string[]; @@ -19,7 +19,7 @@ export const CustomAnalyticsSidebarProjectsList: React.FC = observer((pro return (

Selected Projects

-
+
{projectIds.map((projectId) => { const project = getProjectById(projectId); @@ -28,21 +28,15 @@ export const CustomAnalyticsSidebarProjectsList: React.FC = observer((pro return (
- {project.emoji ? ( - {renderEmoji(project.emoji)} - ) : project.icon_prop ? ( -
{renderEmoji(project.icon_prop)}
- ) : ( - - {project?.name.charAt(0)} - - )} +
+ +

{truncateText(project.name, 20)}

({project.identifier})
-
+
diff --git a/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx b/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx index 6a7b3c7b9d0..0dad084af04 100644 --- a/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx +++ b/web/components/analytics/custom-analytics/sidebar/sidebar-header.tsx @@ -1,12 +1,13 @@ -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; // hooks -import { useCycle, useMember, useModule, useProject } from "hooks/store"; +import { ProjectLogo } from "@/components/project"; +import { NETWORK_CHOICES } from "@/constants/project"; +import { renderFormattedDate } from "@/helpers/date-time.helper"; +import { useCycle, useMember, useModule, useProject } from "@/hooks/store"; +// components // helpers -import { renderEmoji } from "helpers/emoji.helper"; -import { renderFormattedDate } from "helpers/date-time.helper"; // constants -import { NETWORK_CHOICES } from "constants/project"; export const CustomAnalyticsSidebarHeader = observer(() => { const router = useRouter(); @@ -81,15 +82,9 @@ export const CustomAnalyticsSidebarHeader = observer(() => { ) : (
- {projectDetails?.emoji ? ( -
{renderEmoji(projectDetails.emoji)}
- ) : projectDetails?.icon_prop ? ( -
- {renderEmoji(projectDetails.icon_prop)} -
- ) : ( - - {projectDetails?.name.charAt(0)} + {projectDetails && ( + + )}

{projectDetails?.name}

diff --git a/web/components/analytics/custom-analytics/sidebar/sidebar.tsx b/web/components/analytics/custom-analytics/sidebar/sidebar.tsx index 3ad2805f28f..26bb039b0d5 100644 --- a/web/components/analytics/custom-analytics/sidebar/sidebar.tsx +++ b/web/components/analytics/custom-analytics/sidebar/sidebar.tsx @@ -1,25 +1,24 @@ -import { useEffect, } from "react"; -import { useRouter } from "next/router"; +import { useEffect } from "react"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { mutate } from "swr"; // services -import { AnalyticsService } from "services/analytics.service"; // hooks -import { useCycle, useModule, useProject, useUser, useWorkspace } from "hooks/store"; -import useToast from "hooks/use-toast"; // components -import { CustomAnalyticsSidebarHeader, CustomAnalyticsSidebarProjectsList } from "components/analytics"; // ui -import { Button, LayersIcon } from "@plane/ui"; -// icons import { CalendarDays, Download, RefreshCw } from "lucide-react"; +import { IAnalyticsParams, IAnalyticsResponse, IExportAnalyticsFormData, IWorkspace } from "@plane/types"; +import { Button, LayersIcon, TOAST_TYPE, setToast } from "@plane/ui"; +// icons +import { CustomAnalyticsSidebarHeader, CustomAnalyticsSidebarProjectsList } from "@/components/analytics"; // helpers -import { renderFormattedDate } from "helpers/date-time.helper"; // types -import { IAnalyticsParams, IAnalyticsResponse, IExportAnalyticsFormData, IWorkspace } from "@plane/types"; // fetch-keys -import { ANALYTICS } from "constants/fetch-keys"; -import { cn } from "helpers/common.helper"; +import { ANALYTICS } from "@/constants/fetch-keys"; +import { cn } from "@/helpers/common.helper"; +import { renderFormattedDate } from "@/helpers/date-time.helper"; +import { useCycle, useModule, useProject, useUser, useWorkspace } from "@/hooks/store"; +import { AnalyticsService } from "@/services/analytics.service"; type Props = { analytics: IAnalyticsResponse | 
undefined; @@ -34,8 +33,6 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => { // router const router = useRouter(); const { workspaceSlug, projectId, cycleId, moduleId } = router.query; - // toast alert - const { setToastAlert } = useToast(); // store hooks const { currentUser } = useUser(); const { workspaceProjectIds, getProjectById } = useProject(); @@ -107,8 +104,8 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => { analyticsService .exportAnalytics(workspaceSlug.toString(), data) .then((res) => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: res.message, }); @@ -116,8 +113,8 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => { trackExportAnalytics(); }) .catch(() => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: "There was some error in exporting the analytics. Please try again.", }) @@ -146,7 +143,7 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => { return (
@@ -163,8 +160,8 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => { (cycleId ? cycleDetails?.created_at : moduleId - ? moduleDetails?.created_at - : projectDetails?.created_at) ?? "" + ? moduleDetails?.created_at + : projectDetails?.created_at) ?? "" )}
)} @@ -179,10 +176,10 @@ export const CustomAnalyticsSidebar: React.FC = observer((props) => {
-
+
-
+
{defaultAnalytics?.open_issues_classified.map((group) => { const percentage = ((group.state_count / defaultAnalytics.total_issues) * 100).toFixed(0); @@ -50,14 +48,5 @@ export const AnalyticsDemand: React.FC = ({ defaultAnalytics }) => ( ); })}
-
-

- - Estimate Demand: -

-

- {defaultAnalytics.open_estimate_sum}/{defaultAnalytics.total_estimate_sum} -

-
); diff --git a/web/components/analytics/scope-and-demand/leaderboard.tsx b/web/components/analytics/scope-and-demand/leaderboard.tsx index 9cd38dde4ad..e6473b8745f 100644 --- a/web/components/analytics/scope-and-demand/leaderboard.tsx +++ b/web/components/analytics/scope-and-demand/leaderboard.tsx @@ -1,5 +1,5 @@ // ui -import { ProfileEmptyState } from "components/ui"; +import { ProfileEmptyState } from "@/components/ui"; // image import emptyUsers from "public/empty-state/empty_users.svg"; @@ -24,7 +24,7 @@ export const AnalyticsLeaderBoard: React.FC = ({ users, title, emptyState
{users.map((user) => ( = ({ users, title, emptyState {user.display_name
) : (
- {user.display_name !== "" ? user?.display_name?.[0] : "?"} + {user?.display_name !== "" ? user?.display_name?.[0] : "?"}
)} - {user.display_name !== "" ? `${user.display_name}` : "No assignee"} + {user?.display_name !== "" ? `${user?.display_name}` : "No assignee"}
{user.count} diff --git a/web/components/analytics/scope-and-demand/scope-and-demand.tsx b/web/components/analytics/scope-and-demand/scope-and-demand.tsx index 6f26ad73ff9..3c5e3f48e78 100644 --- a/web/components/analytics/scope-and-demand/scope-and-demand.tsx +++ b/web/components/analytics/scope-and-demand/scope-and-demand.tsx @@ -3,13 +3,13 @@ import { useRouter } from "next/router"; import useSWR from "swr"; // services -import { AnalyticsService } from "services/analytics.service"; // components -import { AnalyticsDemand, AnalyticsLeaderBoard, AnalyticsScope, AnalyticsYearWiseIssues } from "components/analytics"; -// ui import { Button, Loader } from "@plane/ui"; +import { AnalyticsDemand, AnalyticsLeaderBoard, AnalyticsScope, AnalyticsYearWiseIssues } from "@/components/analytics"; +// ui // fetch-keys -import { DEFAULT_ANALYTICS } from "constants/fetch-keys"; +import { DEFAULT_ANALYTICS } from "@/constants/fetch-keys"; +import { AnalyticsService } from "@/services/analytics.service"; type Props = { fullScreen?: boolean; diff --git a/web/components/analytics/scope-and-demand/scope.tsx b/web/components/analytics/scope-and-demand/scope.tsx index ea1a51937d4..527761e96c5 100644 --- a/web/components/analytics/scope-and-demand/scope.tsx +++ b/web/components/analytics/scope-and-demand/scope.tsx @@ -1,9 +1,9 @@ // ui -import { BarGraph, ProfileEmptyState } from "components/ui"; +import { IDefaultAnalyticsResponse } from "@plane/types"; +import { BarGraph, ProfileEmptyState } from "@/components/ui"; // image import emptyBarGraph from "public/empty-state/empty_bar_graph.svg"; // types -import { IDefaultAnalyticsResponse } from "@plane/types"; type Props = { defaultAnalytics: IDefaultAnalyticsResponse; diff --git a/web/components/analytics/scope-and-demand/year-wise-issues.tsx b/web/components/analytics/scope-and-demand/year-wise-issues.tsx index 2a62c99d4bd..cbd0e155c82 100644 --- a/web/components/analytics/scope-and-demand/year-wise-issues.tsx +++ 
b/web/components/analytics/scope-and-demand/year-wise-issues.tsx @@ -1,11 +1,11 @@ // ui -import { LineGraph, ProfileEmptyState } from "components/ui"; +import { IDefaultAnalyticsResponse } from "@plane/types"; +import { LineGraph, ProfileEmptyState } from "@/components/ui"; // image +import { MONTHS_LIST } from "@/constants/calendar"; import emptyGraph from "public/empty-state/empty_graph.svg"; // types -import { IDefaultAnalyticsResponse } from "@plane/types"; // constants -import { MONTHS_LIST } from "constants/calendar"; type Props = { defaultAnalytics: IDefaultAnalyticsResponse; diff --git a/web/components/api-token/delete-token-modal.tsx b/web/components/api-token/delete-token-modal.tsx index 993289c10c7..4c511de4afe 100644 --- a/web/components/api-token/delete-token-modal.tsx +++ b/web/components/api-token/delete-token-modal.tsx @@ -2,16 +2,14 @@ import { useState, Fragment, FC } from "react"; import { useRouter } from "next/router"; import { mutate } from "swr"; import { Dialog, Transition } from "@headlessui/react"; +import { IApiToken } from "@plane/types"; // services -import { APITokenService } from "services/api_token.service"; -// hooks -import useToast from "hooks/use-toast"; +import { Button, TOAST_TYPE, setToast } from "@plane/ui"; +import { API_TOKENS_LIST } from "@/constants/fetch-keys"; +import { APITokenService } from "@/services/api_token.service"; // ui -import { Button } from "@plane/ui"; // types -import { IApiToken } from "@plane/types"; // fetch-keys -import { API_TOKENS_LIST } from "constants/fetch-keys"; type Props = { isOpen: boolean; @@ -25,8 +23,6 @@ export const DeleteApiTokenModal: FC = (props) => { const { isOpen, onClose, tokenId } = props; // states const [deleteLoading, setDeleteLoading] = useState(false); - // hooks - const { setToastAlert } = useToast(); // router const router = useRouter(); const { workspaceSlug } = router.query; @@ -44,8 +40,8 @@ export const DeleteApiTokenModal: FC = (props) => { apiTokenService 
.deleteApiToken(workspaceSlug.toString(), tokenId) .then(() => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Token deleted successfully.", }); @@ -59,8 +55,8 @@ export const DeleteApiTokenModal: FC = (props) => { handleClose(); }) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error", message: err?.message ?? "Something went wrong. Please try again.", }) diff --git a/web/components/api-token/modal/create-token-modal.tsx b/web/components/api-token/modal/create-token-modal.tsx index b3fc3df78ec..32305ba5be0 100644 --- a/web/components/api-token/modal/create-token-modal.tsx +++ b/web/components/api-token/modal/create-token-modal.tsx @@ -2,19 +2,20 @@ import React, { useState } from "react"; import { useRouter } from "next/router"; import { mutate } from "swr"; import { Dialog, Transition } from "@headlessui/react"; +import { IApiToken } from "@plane/types"; // services -import { APITokenService } from "services/api_token.service"; -// hooks -import useToast from "hooks/use-toast"; +import { TOAST_TYPE, setToast } from "@plane/ui"; + +import { CreateApiTokenForm, GeneratedTokenDetails } from "@/components/api-token"; +import { API_TOKENS_LIST } from "@/constants/fetch-keys"; +import { renderFormattedDate } from "@/helpers/date-time.helper"; +import { csvDownload } from "@/helpers/download.helper"; +import { APITokenService } from "@/services/api_token.service"; +// ui // components -import { CreateApiTokenForm, GeneratedTokenDetails } from "components/api-token"; // helpers -import { csvDownload } from "helpers/download.helper"; -import { renderFormattedDate } from "helpers/date-time.helper"; // types -import { IApiToken } from "@plane/types"; // fetch-keys -import { API_TOKENS_LIST } from "constants/fetch-keys"; type Props = { isOpen: boolean; @@ -32,8 +33,6 @@ export const CreateApiTokenModal: React.FC = (props) => { // router const router = useRouter(); 
const { workspaceSlug } = router.query; - // toast alert - const { setToastAlert } = useToast(); const handleClose = () => { onClose(); @@ -76,10 +75,10 @@ export const CreateApiTokenModal: React.FC = (props) => { ); }) .catch((err) => { - setToastAlert({ - message: err.message, - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error", + message: err.message, }); throw err; diff --git a/web/components/api-token/modal/form.tsx b/web/components/api-token/modal/form.tsx index 77753e64d20..b2b3f64dcbe 100644 --- a/web/components/api-token/modal/form.tsx +++ b/web/components/api-token/modal/form.tsx @@ -1,16 +1,14 @@ import { useState } from "react"; import { add } from "date-fns"; import { Controller, useForm } from "react-hook-form"; -import { DateDropdown } from "components/dropdowns"; import { Calendar } from "lucide-react"; -// hooks -import useToast from "hooks/use-toast"; +import { IApiToken } from "@plane/types"; // ui -import { Button, CustomSelect, Input, TextArea, ToggleSwitch } from "@plane/ui"; +import { Button, CustomSelect, Input, TextArea, ToggleSwitch, TOAST_TYPE, setToast } from "@plane/ui"; +import { DateDropdown } from "@/components/dropdowns"; // helpers -import { renderFormattedDate, renderFormattedPayloadDate } from "helpers/date-time.helper"; +import { renderFormattedDate, renderFormattedPayloadDate } from "@/helpers/date-time.helper"; // types -import { IApiToken } from "@plane/types"; type Props = { handleClose: () => void; @@ -66,8 +64,6 @@ export const CreateApiTokenForm: React.FC = (props) => { const { handleClose, neverExpires, toggleNeverExpires, onSubmit } = props; // states const [customDate, setCustomDate] = useState(null); - // toast alert - const { setToastAlert } = useToast(); // form const { control, @@ -80,8 +76,8 @@ export const CreateApiTokenForm: React.FC = (props) => { const handleFormSubmit = async (data: IApiToken) => { // if never expires is toggled off, and the user has not selected a custom date or a 
predefined date, show an error if (!neverExpires && (!data.expired_at || (data.expired_at === "custom" && !customDate))) - return setToastAlert({ - type: "error", + return setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: "Please select an expiration date.", }); @@ -94,7 +90,7 @@ export const CreateApiTokenForm: React.FC = (props) => { // if never expires is toggled on, set expired_at to null if (neverExpires) payload.expired_at = null; // if never expires is toggled off, and the user has selected a custom date, set expired_at to the custom date - else if (data.expired_at === "custom") payload.expired_at = renderFormattedPayloadDate(customDate ?? new Date()); + else if (data.expired_at === "custom") payload.expired_at = renderFormattedPayloadDate(customDate); // if never expires is toggled off, and the user has selected a predefined date, set expired_at to the predefined date else { const expiryDate = getExpiryDate(data.expired_at ?? ""); @@ -174,8 +170,8 @@ export const CreateApiTokenForm: React.FC = (props) => { {value === "custom" ? "Custom date" : selectedOption - ? selectedOption.label - : "Set expiration date"} + ? selectedOption.label + : "Set expiration date"}
} value={value} @@ -211,8 +207,8 @@ export const CreateApiTokenForm: React.FC = (props) => { ? `Expires ${renderFormattedDate(customDate)}` : null : watch("expired_at") - ? `Expires ${getExpiryDate(watch("expired_at") ?? "")}` - : null} + ? `Expires ${getExpiryDate(watch("expired_at") ?? "")}` + : null} )}
diff --git a/web/components/api-token/modal/generated-token-details.tsx b/web/components/api-token/modal/generated-token-details.tsx index f28ea348126..d2bbfd7a1fd 100644 --- a/web/components/api-token/modal/generated-token-details.tsx +++ b/web/components/api-token/modal/generated-token-details.tsx @@ -1,13 +1,13 @@ import { Copy } from "lucide-react"; -// hooks -import useToast from "hooks/use-toast"; +import { IApiToken } from "@plane/types"; // ui -import { Button, Tooltip } from "@plane/ui"; +import { Button, Tooltip, TOAST_TYPE, setToast } from "@plane/ui"; // helpers -import { renderFormattedDate } from "helpers/date-time.helper"; -import { copyTextToClipboard } from "helpers/string.helper"; +import { renderFormattedDate } from "@/helpers/date-time.helper"; +import { copyTextToClipboard } from "@/helpers/string.helper"; // types -import { IApiToken } from "@plane/types"; +import { usePlatformOS } from "@/hooks/use-platform-os"; +// hooks type Props = { handleClose: () => void; @@ -16,13 +16,11 @@ type Props = { export const GeneratedTokenDetails: React.FC = (props) => { const { handleClose, tokenDetails } = props; - - const { setToastAlert } = useToast(); - + const { isMobile } = usePlatformOS(); const copyApiToken = (token: string) => { copyTextToClipboard(token).then(() => - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Token copied to clipboard.", }) @@ -44,7 +42,7 @@ export const GeneratedTokenDetails: React.FC = (props) => { className="mt-4 flex w-full items-center justify-between rounded-md border-[0.5px] border-custom-border-200 px-3 py-2 text-sm font-medium outline-none" > {tokenDetails.token} - + diff --git a/web/components/api-token/token-list-item.tsx b/web/components/api-token/token-list-item.tsx index 2de73122280..5ac7ec681c1 100644 --- a/web/components/api-token/token-list-item.tsx +++ b/web/components/api-token/token-list-item.tsx @@ -1,13 +1,14 @@ import { useState } from "react"; 
import { XCircle } from "lucide-react"; +import { IApiToken } from "@plane/types"; // components -import { DeleteApiTokenModal } from "components/api-token"; -// ui import { Tooltip } from "@plane/ui"; +import { DeleteApiTokenModal } from "@/components/api-token"; +import { renderFormattedDate, calculateTimeAgo } from "@/helpers/date-time.helper"; +import { usePlatformOS } from "@/hooks/use-platform-os"; +// ui // helpers -import { renderFormattedDate, calculateTimeAgo } from "helpers/date-time.helper"; // types -import { IApiToken } from "@plane/types"; type Props = { token: IApiToken; @@ -17,12 +18,14 @@ export const ApiTokenListItem: React.FC = (props) => { const { token } = props; // states const [deleteModalOpen, setDeleteModalOpen] = useState(false); + // hooks + const { isMobile } = usePlatformOS(); return ( <> setDeleteModalOpen(false)} tokenId={token.id} />
- +
@@ -130,6 +132,8 @@ export const SelectMonthModal: React.FC = ({ type, initialValues, isOpen, hasError={Boolean(errors.archive_in)} placeholder="Enter Months" className="w-full border-custom-border-200" + min={1} + max={12} /> Months
diff --git a/web/components/breadcrumbs/index.tsx b/web/components/breadcrumbs/index.tsx index 16fa1e33333..de93cdec3f3 100644 --- a/web/components/breadcrumbs/index.tsx +++ b/web/components/breadcrumbs/index.tsx @@ -1,6 +1,6 @@ import * as React from "react"; -import { useRouter } from "next/router"; import Link from "next/link"; +import { useRouter } from "next/router"; // icons import { MoveLeft } from "lucide-react"; diff --git a/web/components/command-palette/actions/help-actions.tsx b/web/components/command-palette/actions/help-actions.tsx index 4aaaab33a95..539ec00aa5e 100644 --- a/web/components/command-palette/actions/help-actions.tsx +++ b/web/components/command-palette/actions/help-actions.tsx @@ -1,9 +1,9 @@ import { Command } from "cmdk"; import { FileText, GithubIcon, MessageSquare, Rocket } from "lucide-react"; // hooks -import { useApplication } from "hooks/store"; -// ui import { DiscordIcon } from "@plane/ui"; +import { useApplication } from "@/hooks/store"; +// ui type Props = { closePalette: () => void; @@ -69,7 +69,9 @@ export const CommandPaletteHelpActions: React.FC = (props) => { { closePalette(); - (window as any)?.$crisp.push(["do", "chat:open"]); + if (window) { + window.$crisp.push(["do", "chat:show"]); + } }} className="focus:outline-none" > diff --git a/web/components/command-palette/actions/issue-actions/actions-list.tsx b/web/components/command-palette/actions/issue-actions/actions-list.tsx index 55f72c85d11..04bcc989953 100644 --- a/web/components/command-palette/actions/issue-actions/actions-list.tsx +++ b/web/components/command-palette/actions/issue-actions/actions-list.tsx @@ -1,18 +1,16 @@ -import { useRouter } from "next/router"; -import { observer } from "mobx-react-lite"; import { Command } from "cmdk"; +import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { LinkIcon, Signal, Trash2, UserMinus2, UserPlus2 } from "lucide-react"; +import { TIssue } from "@plane/types"; // hooks -import { 
useApplication, useUser, useIssues } from "hooks/store"; -// hooks -import useToast from "hooks/use-toast"; +import { DoubleCircleIcon, UserGroupIcon, TOAST_TYPE, setToast } from "@plane/ui"; +import { EIssuesStoreType } from "@/constants/issue"; +import { copyTextToClipboard } from "@/helpers/string.helper"; +import { useApplication, useUser, useIssues } from "@/hooks/store"; // ui -import { DoubleCircleIcon, UserGroupIcon } from "@plane/ui"; // helpers -import { copyTextToClipboard } from "helpers/string.helper"; // types -import { TIssue } from "@plane/types"; -import { EIssuesStoreType } from "constants/issue"; type Props = { closePalette: () => void; @@ -37,8 +35,6 @@ export const CommandPaletteIssueActions: React.FC = observer((props) => { } = useApplication(); const { currentUser } = useUser(); - const { setToastAlert } = useToast(); - const handleUpdateIssue = async (formData: Partial) => { if (!workspaceSlug || !projectId || !issueDetails) return; @@ -71,14 +67,14 @@ export const CommandPaletteIssueActions: React.FC = observer((props) => { const url = new URL(window.location.href); copyTextToClipboard(url.href) .then(() => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Copied to clipboard", }); }) .catch(() => { - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Some error occurred", }); }); diff --git a/web/components/command-palette/actions/issue-actions/change-assignee.tsx b/web/components/command-palette/actions/issue-actions/change-assignee.tsx index 96fba41f6e2..4797bb45f63 100644 --- a/web/components/command-palette/actions/issue-actions/change-assignee.tsx +++ b/web/components/command-palette/actions/issue-actions/change-assignee.tsx @@ -1,14 +1,14 @@ -import { useRouter } from "next/router"; -import { observer } from "mobx-react-lite"; import { Command } from "cmdk"; +import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { Check } from 
"lucide-react"; +import { TIssue } from "@plane/types"; // mobx store -import { useIssues, useMember } from "hooks/store"; -// ui import { Avatar } from "@plane/ui"; +import { EIssuesStoreType } from "@/constants/issue"; +import { useIssues, useMember } from "@/hooks/store"; +// ui // types -import { TIssue } from "@plane/types"; -import { EIssuesStoreType } from "constants/issue"; type Props = { closePalette: () => void; diff --git a/web/components/command-palette/actions/issue-actions/change-priority.tsx b/web/components/command-palette/actions/issue-actions/change-priority.tsx index 8d1c482610a..6d2724859a0 100644 --- a/web/components/command-palette/actions/issue-actions/change-priority.tsx +++ b/web/components/command-palette/actions/issue-actions/change-priority.tsx @@ -1,15 +1,15 @@ -import { useRouter } from "next/router"; -import { observer } from "mobx-react-lite"; import { Command } from "cmdk"; +import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { Check } from "lucide-react"; +import { TIssue, TIssuePriorities } from "@plane/types"; // mobx store -import { useIssues } from "hooks/store"; -// ui import { PriorityIcon } from "@plane/ui"; +import { EIssuesStoreType, ISSUE_PRIORITIES } from "@/constants/issue"; +import { useIssues } from "@/hooks/store"; +// ui // types -import { TIssue, TIssuePriorities } from "@plane/types"; // constants -import { EIssuesStoreType, ISSUE_PRIORITIES } from "constants/issue"; type Props = { closePalette: () => void; diff --git a/web/components/command-palette/actions/issue-actions/change-state.tsx b/web/components/command-palette/actions/issue-actions/change-state.tsx index 7841a4a1e65..32c6a0de5ff 100644 --- a/web/components/command-palette/actions/issue-actions/change-state.tsx +++ b/web/components/command-palette/actions/issue-actions/change-state.tsx @@ -1,15 +1,15 @@ -import { useRouter } from "next/router"; -import { observer } from "mobx-react-lite"; import { Command } from 
"cmdk"; +import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; // hooks -import { useProjectState, useIssues } from "hooks/store"; -// ui +import { Check } from "lucide-react"; +import { TIssue } from "@plane/types"; import { Spinner, StateGroupIcon } from "@plane/ui"; +import { EIssuesStoreType } from "@/constants/issue"; +import { useProjectState, useIssues } from "@/hooks/store"; +// ui // icons -import { Check } from "lucide-react"; // types -import { TIssue } from "@plane/types"; -import { EIssuesStoreType } from "constants/issue"; type Props = { closePalette: () => void; diff --git a/web/components/command-palette/actions/project-actions.tsx b/web/components/command-palette/actions/project-actions.tsx index bdd08a0d83f..297d1ba364f 100644 --- a/web/components/command-palette/actions/project-actions.tsx +++ b/web/components/command-palette/actions/project-actions.tsx @@ -1,9 +1,9 @@ import { Command } from "cmdk"; import { ContrastIcon, FileText } from "lucide-react"; // hooks -import { useApplication, useEventTracker } from "hooks/store"; -// ui import { DiceIcon, PhotoFilterIcon } from "@plane/ui"; +import { useApplication, useEventTracker } from "@/hooks/store"; +// ui type Props = { closePalette: () => void; diff --git a/web/components/command-palette/actions/search-results.tsx b/web/components/command-palette/actions/search-results.tsx index 769a26be7f4..489794295b6 100644 --- a/web/components/command-palette/actions/search-results.tsx +++ b/web/components/command-palette/actions/search-results.tsx @@ -1,9 +1,9 @@ -import { useRouter } from "next/router"; import { Command } from "cmdk"; +import { useRouter } from "next/router"; +import { IWorkspaceSearchResults } from "@plane/types"; // helpers -import { commandGroups } from "components/command-palette"; +import { commandGroups } from "@/components/command-palette"; // types -import { IWorkspaceSearchResults } from "@plane/types"; type Props = { closePalette: () => void; 
diff --git a/web/components/command-palette/actions/theme-actions.tsx b/web/components/command-palette/actions/theme-actions.tsx index 976a63c871d..b635881b567 100644 --- a/web/components/command-palette/actions/theme-actions.tsx +++ b/web/components/command-palette/actions/theme-actions.tsx @@ -1,13 +1,14 @@ import React, { FC, useEffect, useState } from "react"; import { Command } from "cmdk"; +import { observer } from "mobx-react-lite"; import { useTheme } from "next-themes"; import { Settings } from "lucide-react"; -import { observer } from "mobx-react-lite"; // hooks -import { useUser } from "hooks/store"; -import useToast from "hooks/use-toast"; +import { TOAST_TYPE, setToast } from "@plane/ui"; +import { THEME_OPTIONS } from "@/constants/themes"; +import { useUser } from "@/hooks/store"; +// ui // constants -import { THEME_OPTIONS } from "constants/themes"; type Props = { closePalette: () => void; @@ -21,15 +22,14 @@ export const CommandPaletteThemeActions: FC = observer((props) => { const { updateCurrentUserTheme } = useUser(); // hooks const { setTheme } = useTheme(); - const { setToastAlert } = useToast(); const updateUserTheme = async (newTheme: string) => { setTheme(newTheme); return updateCurrentUserTheme(newTheme).catch(() => { - setToastAlert({ + setToast({ + type: TOAST_TYPE.ERROR, title: "Failed to save user theme settings!", - type: "error", }); }); }; diff --git a/web/components/command-palette/actions/workspace-settings-actions.tsx b/web/components/command-palette/actions/workspace-settings-actions.tsx index 1f05234f41d..56c118a51ad 100644 --- a/web/components/command-palette/actions/workspace-settings-actions.tsx +++ b/web/components/command-palette/actions/workspace-settings-actions.tsx @@ -1,10 +1,10 @@ -import { useRouter } from "next/router"; import { Command } from "cmdk"; // hooks -import { useUser } from "hooks/store"; import Link from "next/link"; +import { useRouter } from "next/router"; // constants -import { EUserWorkspaceRoles, 
WORKSPACE_SETTINGS_LINKS } from "constants/workspace"; +import { EUserWorkspaceRoles, WORKSPACE_SETTINGS_LINKS } from "@/constants/workspace"; +import { useUser } from "@/hooks/store"; type Props = { closePalette: () => void; diff --git a/web/components/command-palette/command-modal.tsx b/web/components/command-palette/command-modal.tsx index b52976aa8c8..aa36eabed7f 100644 --- a/web/components/command-palette/command-modal.tsx +++ b/web/components/command-palette/command-modal.tsx @@ -1,18 +1,14 @@ import React, { useEffect, useState } from "react"; -import { useRouter } from "next/router"; -import useSWR from "swr"; import { Command } from "cmdk"; -import { Dialog, Transition } from "@headlessui/react"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; +import useSWR from "swr"; import { FolderPlus, Search, Settings } from "lucide-react"; +import { Dialog, Transition } from "@headlessui/react"; +// icons +import { IWorkspaceSearchResults } from "@plane/types"; // hooks -import { useApplication, useEventTracker, useProject } from "hooks/store"; -// services -import { WorkspaceService } from "services/workspace.service"; -import { IssueService } from "services/issue"; -// hooks -import useDebounce from "hooks/use-debounce"; -// components +import { LayersIcon, Loader, ToggleSwitch, Tooltip } from "@plane/ui"; import { CommandPaletteThemeActions, ChangeIssueAssignee, @@ -23,20 +19,29 @@ import { CommandPaletteProjectActions, CommandPaletteWorkspaceSettingsActions, CommandPaletteSearchResults, -} from "components/command-palette"; -import { LayersIcon, Loader, ToggleSwitch, Tooltip } from "@plane/ui"; +} from "@/components/command-palette"; +import { EmptyState } from "@/components/empty-state"; +import { EmptyStateType } from "@/constants/empty-state"; +import { ISSUE_DETAILS } from "@/constants/fetch-keys"; +import { useApplication, useEventTracker, useProject } from "@/hooks/store"; +import useDebounce from 
"@/hooks/use-debounce"; +import { usePlatformOS } from "@/hooks/use-platform-os"; +// services +import { IssueService } from "@/services/issue"; +import { WorkspaceService } from "@/services/workspace.service"; +// ui +// components // types -import { IWorkspaceSearchResults } from "@plane/types"; // fetch-keys -import { ISSUE_DETAILS } from "constants/fetch-keys"; +// constants -// services const workspaceService = new WorkspaceService(); const issueService = new IssueService(); export const CommandModal: React.FC = observer(() => { // hooks - const { getProjectById } = useProject(); + const { getProjectById, workspaceProjectIds } = useProject(); + const { isMobile } = usePlatformOS(); // states const [placeholder, setPlaceholder] = useState("Type a command or search..."); const [resultsCount, setResultsCount] = useState(0); @@ -197,7 +202,7 @@ export const CommandModal: React.FC = observer(() => {
)} {projectId && ( - +
diff --git a/web/components/core/modals/user-image-upload-modal.tsx b/web/components/core/modals/user-image-upload-modal.tsx index 6debc2c15e2..cf43595a0c3 100644 --- a/web/components/core/modals/user-image-upload-modal.tsx +++ b/web/components/core/modals/user-image-upload-modal.tsx @@ -1,19 +1,18 @@ import React, { useState } from "react"; import { observer } from "mobx-react-lite"; import { useDropzone } from "react-dropzone"; +import { UserCircle2 } from "lucide-react"; import { Transition, Dialog } from "@headlessui/react"; // hooks -import { useApplication } from "hooks/store"; +import { Button, TOAST_TYPE, setToast } from "@plane/ui"; + +import { MAX_FILE_SIZE } from "@/constants/common"; +import { useApplication } from "@/hooks/store"; // services -import { FileService } from "services/file.service"; -// hooks -import useToast from "hooks/use-toast"; +import { FileService } from "@/services/file.service"; // ui -import { Button } from "@plane/ui"; // icons -import { UserCircle2 } from "lucide-react"; // constants -import { MAX_FILE_SIZE } from "constants/common"; type Props = { handleDelete?: () => void; @@ -32,8 +31,6 @@ export const UserImageUploadModal: React.FC = observer((props) => { // states const [image, setImage] = useState(null); const [isImageUploading, setIsImageUploading] = useState(false); - // toast alert - const { setToastAlert } = useToast(); // store hooks const { config: { envConfig }, @@ -76,8 +73,8 @@ export const UserImageUploadModal: React.FC = observer((props) => { if (value) fileService.deleteUserFile(value); }) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. 
Please try again.", }) diff --git a/web/components/core/modals/workspace-image-upload-modal.tsx b/web/components/core/modals/workspace-image-upload-modal.tsx index e04ccf8209d..8fbadf3712f 100644 --- a/web/components/core/modals/workspace-image-upload-modal.tsx +++ b/web/components/core/modals/workspace-image-upload-modal.tsx @@ -1,20 +1,18 @@ import React, { useState } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { useDropzone } from "react-dropzone"; +import { UserCircle2 } from "lucide-react"; import { Transition, Dialog } from "@headlessui/react"; // hooks -import { useApplication, useWorkspace } from "hooks/store"; +import { Button, TOAST_TYPE, setToast } from "@plane/ui"; +import { MAX_FILE_SIZE } from "@/constants/common"; +import { useApplication, useWorkspace } from "@/hooks/store"; // services -import { FileService } from "services/file.service"; -// hooks -import useToast from "hooks/use-toast"; +import { FileService } from "@/services/file.service"; // ui -import { Button } from "@plane/ui"; // icons -import { UserCircle2 } from "lucide-react"; // constants -import { MAX_FILE_SIZE } from "constants/common"; type Props = { handleRemove?: () => void; @@ -37,8 +35,6 @@ export const WorkspaceImageUploadModal: React.FC = observer((props) => { const router = useRouter(); const { workspaceSlug } = router.query; - const { setToastAlert } = useToast(); - const { config: { envConfig }, } = useApplication(); @@ -83,8 +79,8 @@ export const WorkspaceImageUploadModal: React.FC = observer((props) => { if (value && currentWorkspace) fileService.deleteFile(currentWorkspace.id, value); }) .catch((err) => - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: err?.error ?? "Something went wrong. 
Please try again.", }) diff --git a/web/components/core/render-if-visible-HOC.tsx b/web/components/core/render-if-visible-HOC.tsx index 24ae19fe79b..585ba7769e7 100644 --- a/web/components/core/render-if-visible-HOC.tsx +++ b/web/components/core/render-if-visible-HOC.tsx @@ -1,10 +1,10 @@ -import { cn } from "helpers/common.helper"; import React, { useState, useRef, useEffect, ReactNode, MutableRefObject } from "react"; +import { cn } from "@/helpers/common.helper"; type Props = { defaultHeight?: string; verticalOffset?: number; - horizonatlOffset?: number; + horizontalOffset?: number; root?: MutableRefObject; children: ReactNode; as?: keyof JSX.IntrinsicElements; @@ -20,7 +20,7 @@ const RenderIfVisible: React.FC = (props) => { defaultHeight = "300px", root, verticalOffset = 50, - horizonatlOffset = 0, + horizontalOffset = 0, as = "div", children, classNames = "", @@ -52,17 +52,18 @@ const RenderIfVisible: React.FC = (props) => { }, { root: root?.current, - rootMargin: `${verticalOffset}% ${horizonatlOffset}% ${verticalOffset}% ${horizonatlOffset}%`, + rootMargin: `${verticalOffset}% ${horizontalOffset}% ${verticalOffset}% ${horizontalOffset}%`, } ); observer.observe(intersectionRef.current); return () => { if (intersectionRef.current) { + // eslint-disable-next-line react-hooks/exhaustive-deps observer.unobserve(intersectionRef.current); } }; } - }, [root?.current, intersectionRef, children, changingReference]); + }, [intersectionRef, children, changingReference, root, verticalOffset, horizontalOffset]); //Set height after render useEffect(() => { diff --git a/web/components/core/sidebar/links-list.tsx b/web/components/core/sidebar/links-list.tsx index 48a5e16b721..83db67c3472 100644 --- a/web/components/core/sidebar/links-list.tsx +++ b/web/components/core/sidebar/links-list.tsx @@ -1,37 +1,35 @@ -// ui -import { ExternalLinkIcon, Tooltip } from "@plane/ui"; +import { observer } from "mobx-react"; // icons import { Pencil, Trash2, LinkIcon } from "lucide-react"; 
-// helpers -import { calculateTimeAgo } from "helpers/date-time.helper"; -// types import { ILinkDetails, UserAuth } from "@plane/types"; +// ui +import { ExternalLinkIcon, Tooltip, TOAST_TYPE, setToast } from "@plane/ui"; +// helpers +import { calculateTimeAgo } from "@/helpers/date-time.helper"; // hooks -import useToast from "hooks/use-toast"; -import { observer } from "mobx-react"; -import { useMeasure } from "@nivo/core"; -import { useMember } from "hooks/store"; +import { useMember } from "@/hooks/store"; +import { usePlatformOS } from "@/hooks/use-platform-os"; +// types type Props = { links: ILinkDetails[]; handleDeleteLink: (linkId: string) => void; handleEditLink: (link: ILinkDetails) => void; userAuth: UserAuth; + disabled?: boolean; }; -export const LinksList: React.FC = observer(({ links, handleDeleteLink, handleEditLink, userAuth }) => { - // toast - const { setToastAlert } = useToast(); +export const LinksList: React.FC = observer(({ links, handleDeleteLink, handleEditLink, userAuth, disabled }) => { const { getUserDetails } = useMember(); - - const isNotAllowed = userAuth.isGuest || userAuth.isViewer; + const { isMobile } = usePlatformOS(); + const isNotAllowed = userAuth.isGuest || userAuth.isViewer || disabled; const copyToClipboard = (text: string) => { navigator.clipboard.writeText(text); - setToastAlert({ - message: "The URL has been successfully copied to your clipboard", - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Copied to clipboard", + message: "The URL has been successfully copied to your clipboard", }); }; @@ -46,7 +44,7 @@ export const LinksList: React.FC = observer(({ links, handleDeleteLink, h - + copyToClipboard(link.title && link.title !== "" ? 
link.title : link.url)} diff --git a/web/components/core/sidebar/progress-chart.tsx b/web/components/core/sidebar/progress-chart.tsx index ca21756fc31..68b1708fe82 100644 --- a/web/components/core/sidebar/progress-chart.tsx +++ b/web/components/core/sidebar/progress-chart.tsx @@ -1,17 +1,18 @@ import React from "react"; import { eachDayOfInterval, isValid } from "date-fns"; +import { TCompletionChartDistribution } from "@plane/types"; // ui -import { LineGraph } from "components/ui"; +import { LineGraph } from "@/components/ui"; // helpers -import { renderFormattedDateWithoutYear } from "helpers/date-time.helper"; +import { getDate, renderFormattedDateWithoutYear } from "@/helpers/date-time.helper"; //types -import { TCompletionChartDistribution } from "@plane/types"; type Props = { distribution: TCompletionChartDistribution; startDate: string | Date; endDate: string | Date; totalIssues: number; + className?: string; }; const styleById = { @@ -40,18 +41,18 @@ const DashedLine = ({ series, lineGenerator, xScale, yScale }: any) => /> )); -const ProgressChart: React.FC = ({ distribution, startDate, endDate, totalIssues }) => { +const ProgressChart: React.FC = ({ distribution, startDate, endDate, totalIssues, className = "" }) => { const chartData = Object.keys(distribution ?? []).map((key) => ({ currentDate: renderFormattedDateWithoutYear(key), pending: distribution[key], })); const generateXAxisTickValues = () => { - const start = new Date(startDate); - const end = new Date(endDate); + const start = getDate(startDate); + const end = getDate(endDate); let dates: Date[] = []; - if (isValid(start) && isValid(end)) { + if (start && end && isValid(start) && isValid(end)) { dates = eachDayOfInterval({ start, end }); } @@ -73,7 +74,7 @@ const ProgressChart: React.FC = ({ distribution, startDate, endDate, tota }; return ( -
+
void; diff --git a/web/components/core/sidebar/sidebar-progress-stats.tsx b/web/components/core/sidebar/sidebar-progress-stats.tsx index 12c387f471e..21b508e2885 100644 --- a/web/components/core/sidebar/sidebar-progress-stats.tsx +++ b/web/components/core/sidebar/sidebar-progress-stats.tsx @@ -3,16 +3,6 @@ import React from "react"; import Image from "next/image"; // headless ui import { Tab } from "@headlessui/react"; -// hooks -import useLocalStorage from "hooks/use-local-storage"; -// images -import emptyLabel from "public/empty-state/empty_label.svg"; -import emptyMembers from "public/empty-state/empty_members.svg"; -// components -import { SingleProgressStats } from "components/core"; -// ui -import { Avatar, StateGroupIcon } from "@plane/ui"; -// types import { IModule, TAssigneesDistribution, @@ -20,6 +10,16 @@ import { TLabelsDistribution, TStateGroups, } from "@plane/types"; +// hooks +import { Avatar, StateGroupIcon } from "@plane/ui"; +import { SingleProgressStats } from "@/components/core"; +import useLocalStorage from "@/hooks/use-local-storage"; +// images +import emptyLabel from "public/empty-state/empty_label.svg"; +import emptyMembers from "public/empty-state/empty_members.svg"; +// components +// ui +// types type Props = { distribution: { @@ -127,7 +127,7 @@ export const SidebarProgressStats: React.FC = ({ {distribution?.assignees.length > 0 ? ( distribution.assignees.map((assignee, index) => { @@ -137,8 +137,8 @@ export const SidebarProgressStats: React.FC = ({ key={assignee.assignee_id} title={
- - {assignee.display_name} + + {assignee?.display_name ?? ""}
} completed={assignee.completed_issues} @@ -187,7 +187,7 @@ export const SidebarProgressStats: React.FC = ({
{distribution?.labels.length > 0 ? ( distribution.labels.map((label, index) => ( @@ -230,7 +230,7 @@ export const SidebarProgressStats: React.FC = ({ {Object.keys(groupedIssues).map((group, index) => ( = observer((props) => { - // props - const { workspaceSlug, projectId } = props; - const { resolvedTheme } = useTheme(); - // store hooks - const { currentUser } = useUser(); - const { - issues: { fetchActiveCycleIssues }, - } = useIssues(EIssuesStoreType.CYCLE); - const { - fetchActiveCycle, - currentProjectActiveCycleId, - getActiveCycleById, - addCycleToFavorites, - removeCycleFromFavorites, - } = useCycle(); - const { currentProjectDetails } = useProject(); - const { getUserDetails } = useMember(); - // toast alert - const { setToastAlert } = useToast(); - - const { isLoading } = useSWR( - workspaceSlug && projectId ? `PROJECT_ACTIVE_CYCLE_${projectId}` : null, - workspaceSlug && projectId ? () => fetchActiveCycle(workspaceSlug, projectId) : null - ); - - const activeCycle = currentProjectActiveCycleId ? getActiveCycleById(currentProjectActiveCycleId) : null; - const cycleOwnerDetails = activeCycle ? getUserDetails(activeCycle.owned_by_id) : undefined; - - const { data: activeCycleIssues } = useSWR( - workspaceSlug && projectId && currentProjectActiveCycleId - ? CYCLE_ISSUES_WITH_PARAMS(currentProjectActiveCycleId, { priority: "urgent,high" }) - : null, - workspaceSlug && projectId && currentProjectActiveCycleId - ? () => fetchActiveCycleIssues(workspaceSlug, projectId, currentProjectActiveCycleId) - : null - ); - - const emptyStateDetail = CYCLE_EMPTY_STATE_DETAILS["active"]; - - const isLightMode = resolvedTheme ? resolvedTheme === "light" : currentUser?.theme.theme === "light"; - const emptyStateImage = getEmptyStateImagePath("cycle", "active", isLightMode); - - if (!activeCycle && isLoading) - return ( - - - - ); - - if (!activeCycle) - return ( - - ); - - const endDate = new Date(activeCycle.end_date ?? 
""); - const startDate = new Date(activeCycle.start_date ?? ""); - - const groupedIssues: any = { - backlog: activeCycle.backlog_issues, - unstarted: activeCycle.unstarted_issues, - started: activeCycle.started_issues, - completed: activeCycle.completed_issues, - cancelled: activeCycle.cancelled_issues, - }; - - const cycleStatus = activeCycle.status.toLowerCase() as TCycleGroups; - - const handleAddToFavorites = (e: MouseEvent) => { - e.preventDefault(); - if (!workspaceSlug || !projectId) return; - - addCycleToFavorites(workspaceSlug?.toString(), projectId.toString(), activeCycle.id).catch(() => { - setToastAlert({ - type: "error", - title: "Error!", - message: "Couldn't add the cycle to favorites. Please try again.", - }); - }); - }; - - const handleRemoveFromFavorites = (e: MouseEvent) => { - e.preventDefault(); - if (!workspaceSlug || !projectId) return; - - removeCycleFromFavorites(workspaceSlug?.toString(), projectId.toString(), activeCycle.id).catch(() => { - setToastAlert({ - type: "error", - title: "Error!", - message: "Couldn't add the cycle to favorites. Please try again.", - }); - }); - }; - - const progressIndicatorData = CYCLE_STATE_GROUPS_DETAILS.map((group, index) => ({ - id: index, - name: group.title, - value: - activeCycle.total_issues > 0 - ? ((activeCycle[group.key as keyof ICycle] as number) / activeCycle.total_issues) * 100 - : 0, - color: group.color, - })); - - const daysLeft = findHowManyDaysLeft(activeCycle.end_date) ?? 0; - - return ( -
-
-
-
-
-
- - - - - -

{truncateText(activeCycle.name, 70)}

-
-
- - - {`${daysLeft} ${daysLeft > 1 ? "days" : "day"} left`} - - {activeCycle.is_favorite ? ( - - ) : ( - - )} - -
- -
-
- - {renderFormattedDate(startDate)} -
- -
- - {renderFormattedDate(endDate)} -
-
- -
-
- {cycleOwnerDetails?.avatar && cycleOwnerDetails?.avatar !== "" ? ( - {cycleOwnerDetails?.display_name} - ) : ( - - {cycleOwnerDetails?.display_name.charAt(0)} - - )} - {cycleOwnerDetails?.display_name} -
- - {activeCycle.assignee_ids.length > 0 && ( -
- - {activeCycle.assignee_ids.map((assigne_id) => { - const member = getUserDetails(assigne_id); - return ; - })} - -
- )} -
- -
-
- - {activeCycle.total_issues} issues -
-
- - {activeCycle.completed_issues} issues -
-
- - - View Cycle - -
-
-
-
-
-
-
- Progress - -
-
- {Object.keys(groupedIssues).map((group, index) => ( - - - {group} -
- } - completed={groupedIssues[group]} - total={activeCycle.total_issues} - /> - ))} -
-
-
-
- -
-
-
-
-
-
High Priority Issues
-
- {activeCycleIssues ? ( - activeCycleIssues.length > 0 ? ( - activeCycleIssues.map((issue: any) => ( - -
- - - - - {currentProjectDetails?.identifier}-{issue.sequence_id} - - - - {truncateText(issue.name, 30)} - -
-
- {}} - projectId={projectId?.toString() ?? ""} - disabled={true} - buttonVariant="background-with-text" - /> - {issue.target_date && ( - -
- - {renderFormattedDateWithoutYear(issue.target_date)} -
-
- )} -
- - )) - ) : ( -
- There are no high priority issues present in this cycle. -
- ) - ) : ( - - - - - - )} -
-
-
-
-
-
- - Ideal -
-
- - Current -
-
-
- - - - - Pending Issues -{" "} - {activeCycle.total_issues - (activeCycle.completed_issues + activeCycle.cancelled_issues)} - -
-
-
- -
-
-
-
- ); -}); diff --git a/web/components/cycles/active-cycle/cycle-stats.tsx b/web/components/cycles/active-cycle/cycle-stats.tsx new file mode 100644 index 00000000000..2eb12876324 --- /dev/null +++ b/web/components/cycles/active-cycle/cycle-stats.tsx @@ -0,0 +1,278 @@ +import { FC, Fragment } from "react"; +import { observer } from "mobx-react"; +import Link from "next/link"; +import useSWR from "swr"; +import { CalendarCheck } from "lucide-react"; +import { Tab } from "@headlessui/react"; +// types +import { ICycle, TIssue } from "@plane/types"; +// ui +import { Tooltip, Loader, PriorityIcon, Avatar } from "@plane/ui"; +// components +import { SingleProgressStats } from "@/components/core"; +import { StateDropdown } from "@/components/dropdowns"; +import { EmptyState } from "@/components/empty-state"; +// constants +import { EmptyStateType } from "@/constants/empty-state"; +import { CYCLE_ISSUES_WITH_PARAMS } from "@/constants/fetch-keys"; +import { EIssuesStoreType } from "@/constants/issue"; +// helper +import { cn } from "@/helpers/common.helper"; +import { renderFormattedDate, renderFormattedDateWithoutYear } from "@/helpers/date-time.helper"; +// hooks +import { useIssues, useProject } from "@/hooks/store"; +import useLocalStorage from "@/hooks/use-local-storage"; + +export type ActiveCycleStatsProps = { + workspaceSlug: string; + projectId: string; + cycle: ICycle; +}; + +export const ActiveCycleStats: FC = observer((props) => { + const { workspaceSlug, projectId, cycle } = props; + + const { storedValue: tab, setValue: setTab } = useLocalStorage("activeCycleTab", "Assignees"); + + const currentValue = (tab: string | null) => { + switch (tab) { + case "Priority-Issues": + return 0; + case "Assignees": + return 1; + case "Labels": + return 2; + default: + return 0; + } + }; + const { + issues: { fetchActiveCycleIssues }, + } = useIssues(EIssuesStoreType.CYCLE); + + const { currentProjectDetails } = useProject(); + + const { data: activeCycleIssues } = useSWR( 
+ workspaceSlug && projectId && cycle.id ? CYCLE_ISSUES_WITH_PARAMS(cycle.id, { priority: "urgent,high" }) : null, + workspaceSlug && projectId && cycle.id ? () => fetchActiveCycleIssues(workspaceSlug, projectId, cycle.id) : null + ); + + const cycleIssues = activeCycleIssues ?? []; + + return ( +
+ { + switch (i) { + case 0: + return setTab("Priority-Issues"); + case 1: + return setTab("Assignees"); + case 2: + return setTab("Labels"); + + default: + return setTab("Priority-Issues"); + } + }} + > + + + cn( + "relative z-[1] font-semibold text-xs rounded-[3px] py-1.5 text-custom-text-400 focus:outline-none transition duration-500", + { + "text-custom-text-300 bg-custom-background-100": selected, + "hover:text-custom-text-300": !selected, + } + ) + } + > + Priority Issues + + + cn( + "relative z-[1] font-semibold text-xs rounded-[3px] py-1.5 text-custom-text-400 focus:outline-none transition duration-500", + { + "text-custom-text-300 bg-custom-background-100": selected, + "hover:text-custom-text-300": !selected, + } + ) + } + > + Assignees + + + cn( + "relative z-[1] font-semibold text-xs rounded-[3px] py-1.5 text-custom-text-400 focus:outline-none transition duration-500", + { + "text-custom-text-300 bg-custom-background-100": selected, + "hover:text-custom-text-300": !selected, + } + ) + } + > + Labels + + + + + +
+ {cycleIssues ? ( + cycleIssues.length > 0 ? ( + cycleIssues.map((issue: TIssue) => ( + +
+ + + + + {currentProjectDetails?.identifier}-{issue.sequence_id} + + + + {issue.name} + +
+
+ {}} + projectId={projectId?.toString() ?? ""} + disabled + buttonVariant="background-with-text" + buttonContainerClassName="cursor-pointer max-w-24" + showTooltip + /> + {issue.target_date && ( + +
+ + + {renderFormattedDateWithoutYear(issue.target_date)} + +
+
+ )} +
+ + )) + ) : ( +
+ +
+ ) + ) : ( + + + + + + )} +
+
+ + + {cycle?.distribution?.assignees && cycle.distribution.assignees.length > 0 ? ( + cycle.distribution?.assignees?.map((assignee, index) => { + if (assignee.assignee_id) + return ( + + + + {assignee.display_name} +
+ } + completed={assignee.completed_issues} + total={assignee.total_issues} + /> + ); + else + return ( + +
+ User +
+ No assignee +
+ } + completed={assignee.completed_issues} + total={assignee.total_issues} + /> + ); + }) + ) : ( +
+ +
+ )} + + + + {cycle?.distribution?.labels && cycle.distribution.labels.length > 0 ? ( + cycle.distribution.labels?.map((label, index) => ( + + + {label.label_name ?? "No labels"} +
+ } + completed={label.completed_issues} + total={label.total_issues} + /> + )) + ) : ( +
+ +
+ )} + + + +
+ ); +}); diff --git a/web/components/cycles/active-cycle/header.tsx b/web/components/cycles/active-cycle/header.tsx new file mode 100644 index 00000000000..98ed91c1dd0 --- /dev/null +++ b/web/components/cycles/active-cycle/header.tsx @@ -0,0 +1,77 @@ +import { FC } from "react"; +import Link from "next/link"; +// types +import { ICycle, TCycleGroups } from "@plane/types"; +// ui +import { Tooltip, CycleGroupIcon, getButtonStyling, Avatar, AvatarGroup } from "@plane/ui"; +// helpers +import { renderFormattedDate, findHowManyDaysLeft } from "@/helpers/date-time.helper"; +import { truncateText } from "@/helpers/string.helper"; +// hooks +import { useMember } from "@/hooks/store"; + +export type ActiveCycleHeaderProps = { + cycle: ICycle; + workspaceSlug: string; + projectId: string; +}; + +export const ActiveCycleHeader: FC = (props) => { + const { cycle, workspaceSlug, projectId } = props; + // store + const { getUserDetails } = useMember(); + const cycleOwnerDetails = cycle && cycle.owned_by_id ? getUserDetails(cycle.owned_by_id) : undefined; + + const daysLeft = findHowManyDaysLeft(cycle.end_date) ?? 0; + const currentCycleStatus = cycle.status.toLocaleLowerCase() as TCycleGroups; + + const cycleAssignee = (cycle.distribution?.assignees ?? []).filter((assignee) => assignee.display_name); + + return ( +
+
+ + +

{truncateText(cycle.name, 70)}

+
+ + + {`${daysLeft} ${daysLeft > 1 ? "days" : "day"} left`} + + +
+
+
+
+ + {cycleAssignee.length > 0 && ( + + + {cycleAssignee.map((member) => ( + + ))} + + + )} +
+
+ + View Cycle + +
+
+ ); +}; diff --git a/web/components/cycles/active-cycle/index.ts b/web/components/cycles/active-cycle/index.ts new file mode 100644 index 00000000000..d88ccc3e8b6 --- /dev/null +++ b/web/components/cycles/active-cycle/index.ts @@ -0,0 +1,8 @@ +export * from "./root"; +export * from "./header"; +export * from "./stats"; +export * from "./upcoming-cycles-list-item"; +export * from "./upcoming-cycles-list"; +export * from "./cycle-stats"; +export * from "./progress"; +export * from "./productivity"; diff --git a/web/components/cycles/active-cycle/productivity.tsx b/web/components/cycles/active-cycle/productivity.tsx new file mode 100644 index 00000000000..a3366d9341c --- /dev/null +++ b/web/components/cycles/active-cycle/productivity.tsx @@ -0,0 +1,58 @@ +import { FC } from "react"; +// types +import { ICycle } from "@plane/types"; +// components +import ProgressChart from "@/components/core/sidebar/progress-chart"; +import { EmptyState } from "@/components/empty-state"; +// constants +import { EmptyStateType } from "@/constants/empty-state"; + +export type ActiveCycleProductivityProps = { + cycle: ICycle; +}; + +export const ActiveCycleProductivity: FC = (props) => { + const { cycle } = props; + + return ( +
+
+

Issue burndown

+
+ {cycle.total_issues > 0 ? ( + <> +
+
+
+
+ + Ideal +
+
+ + Current +
+
+ {`Pending issues - ${cycle.backlog_issues + cycle.unstarted_issues + cycle.started_issues}`} +
+
+ +
+
+ + ) : ( + <> +
+ +
+ + )} +
+ ); +}; diff --git a/web/components/cycles/active-cycle/progress.tsx b/web/components/cycles/active-cycle/progress.tsx new file mode 100644 index 00000000000..752f72bcc17 --- /dev/null +++ b/web/components/cycles/active-cycle/progress.tsx @@ -0,0 +1,90 @@ +import { FC } from "react"; +// types +import { ICycle } from "@plane/types"; +// ui +import { LinearProgressIndicator } from "@plane/ui"; +// components +import { EmptyState } from "@/components/empty-state"; +// constants +import { CYCLE_STATE_GROUPS_DETAILS } from "@/constants/cycle"; +import { EmptyStateType } from "@/constants/empty-state"; + +export type ActiveCycleProgressProps = { + cycle: ICycle; +}; + +export const ActiveCycleProgress: FC = (props) => { + const { cycle } = props; + + const progressIndicatorData = CYCLE_STATE_GROUPS_DETAILS.map((group, index) => ({ + id: index, + name: group.title, + value: cycle.total_issues > 0 ? (cycle[group.key as keyof ICycle] as number) : 0, + color: group.color, + })); + + const groupedIssues: any = { + completed: cycle.completed_issues, + started: cycle.started_issues, + unstarted: cycle.unstarted_issues, + backlog: cycle.backlog_issues, + }; + + return ( +
+
+
+

Progress

+ {cycle.total_issues > 0 && ( + + {`${cycle.completed_issues + cycle.cancelled_issues}/${cycle.total_issues - cycle.cancelled_issues} ${ + cycle.completed_issues + cycle.cancelled_issues > 1 ? "Issues" : "Issue" + } closed`} + + )} +
+ {cycle.total_issues > 0 && } +
+ + {cycle.total_issues > 0 ? ( +
+ {Object.keys(groupedIssues).map((group, index) => ( + <> + {groupedIssues[group] > 0 && ( +
+
+
+ + {group} +
+ {`${groupedIssues[group]} ${ + groupedIssues[group] > 1 ? "Issues" : "Issue" + }`} +
+
+ )} + + ))} + {cycle.cancelled_issues > 0 && ( + + + {`${cycle.cancelled_issues} cancelled ${ + cycle.cancelled_issues > 1 ? "issues are" : "issue is" + } excluded from this report.`}{" "} + + + )} +
+ ) : ( +
+ +
+ )} +
+ ); +}; diff --git a/web/components/cycles/active-cycle/root.tsx b/web/components/cycles/active-cycle/root.tsx new file mode 100644 index 00000000000..bd2c3b61328 --- /dev/null +++ b/web/components/cycles/active-cycle/root.tsx @@ -0,0 +1,92 @@ +import { observer } from "mobx-react-lite"; +import useSWR from "swr"; +// ui +import { Loader } from "@plane/ui"; +// components +import { + ActiveCycleHeader, + ActiveCycleProductivity, + ActiveCycleProgress, + ActiveCycleStats, + UpcomingCyclesList, +} from "@/components/cycles"; +import { EmptyState } from "@/components/empty-state"; +// constants +import { EmptyStateType } from "@/constants/empty-state"; +// hooks +import { useCycle, useCycleFilter } from "@/hooks/store"; + +interface IActiveCycleDetails { + workspaceSlug: string; + projectId: string; +} + +export const ActiveCycleRoot: React.FC = observer((props) => { + // props + const { workspaceSlug, projectId } = props; + // store hooks + const { fetchActiveCycle, currentProjectActiveCycleId, currentProjectUpcomingCycleIds, getActiveCycleById } = + useCycle(); + // cycle filters hook + const { updateDisplayFilters } = useCycleFilter(); + // derived values + const activeCycle = currentProjectActiveCycleId ? getActiveCycleById(currentProjectActiveCycleId) : null; + // fetch active cycle details + const { isLoading } = useSWR( + workspaceSlug && projectId ? `PROJECT_ACTIVE_CYCLE_${projectId}` : null, + workspaceSlug && projectId ? () => fetchActiveCycle(workspaceSlug, projectId) : null + ); + + const handleEmptyStateAction = () => + updateDisplayFilters(projectId, { + active_tab: "all", + }); + + // show loader if active cycle is loading + if (!activeCycle && isLoading) + return ( + + + + ); + + if (!activeCycle) { + // show empty state if no active cycle is present + if (currentProjectUpcomingCycleIds?.length === 0) + return ; + // show upcoming cycles list, if present + else + return ( + <> +
+
+
No active cycle
+

+ Create new cycles to find them here or check +
+ {"'"}All{"'"} cycles tab to see all cycles or{" "} + +

+
+
+ + + ); + } + + return ( + <> +
+ +
+ + + +
+
+ {currentProjectUpcomingCycleIds && } + + ); +}); diff --git a/web/components/cycles/active-cycle-stats.tsx b/web/components/cycles/active-cycle/stats.tsx similarity index 95% rename from web/components/cycles/active-cycle-stats.tsx rename to web/components/cycles/active-cycle/stats.tsx index 3ca5caeb204..7e7a97e7d30 100644 --- a/web/components/cycles/active-cycle-stats.tsx +++ b/web/components/cycles/active-cycle/stats.tsx @@ -1,13 +1,13 @@ import React, { Fragment } from "react"; import { Tab } from "@headlessui/react"; +import { ICycle } from "@plane/types"; // hooks -import useLocalStorage from "hooks/use-local-storage"; +import { Avatar } from "@plane/ui"; +import { SingleProgressStats } from "@/components/core"; +import useLocalStorage from "@/hooks/use-local-storage"; // components -import { SingleProgressStats } from "components/core"; // ui -import { Avatar } from "@plane/ui"; // types -import { ICycle } from "@plane/types"; type Props = { cycle: ICycle; @@ -82,7 +82,7 @@ export const ActiveCycleProgressStats: React.FC = ({ cycle }) => {
- {assignee.display_name} + {assignee?.display_name ?? ""}
} completed={assignee.completed_issues} @@ -134,7 +134,7 @@ export const ActiveCycleProgressStats: React.FC = ({ cycle }) => { ) : (
- There are no high priority issues present in this cycle. + There are no issues present in this cycle.
)} diff --git a/web/components/cycles/active-cycle/upcoming-cycles-list-item.tsx b/web/components/cycles/active-cycle/upcoming-cycles-list-item.tsx new file mode 100644 index 00000000000..2e48a95b4dc --- /dev/null +++ b/web/components/cycles/active-cycle/upcoming-cycles-list-item.tsx @@ -0,0 +1,135 @@ +import { observer } from "mobx-react"; +import Link from "next/link"; +import { useRouter } from "next/router"; +import { Star, User2 } from "lucide-react"; +// hooks +import { Avatar, AvatarGroup, setPromiseToast } from "@plane/ui"; +import { CycleQuickActions } from "@/components/cycles"; +import { CYCLE_FAVORITED, CYCLE_UNFAVORITED } from "@/constants/event-tracker"; +import { renderFormattedDate } from "@/helpers/date-time.helper"; +import { useCycle, useEventTracker, useMember } from "@/hooks/store"; +// components +// ui +// helpers +// constants + +type Props = { + cycleId: string; +}; + +export const UpcomingCycleListItem: React.FC = observer((props) => { + const { cycleId } = props; + // router + const router = useRouter(); + const { workspaceSlug, projectId } = router.query; + // store hooks + const { captureEvent } = useEventTracker(); + const { addCycleToFavorites, getCycleById, removeCycleFromFavorites } = useCycle(); + const { getUserDetails } = useMember(); + // derived values + const cycle = getCycleById(cycleId); + + const handleAddToFavorites = (e: React.MouseEvent) => { + e.preventDefault(); + if (!workspaceSlug || !projectId) return; + + const addToFavoritePromise = addCycleToFavorites(workspaceSlug?.toString(), projectId.toString(), cycleId).then( + () => { + captureEvent(CYCLE_FAVORITED, { + cycle_id: cycleId, + element: "List layout", + state: "SUCCESS", + }); + } + ); + + setPromiseToast(addToFavoritePromise, { + loading: "Adding cycle to favorites...", + success: { + title: "Success!", + message: () => "Cycle added to favorites.", + }, + error: { + title: "Error!", + message: () => "Couldn't add the cycle to favorites. 
Please try again.", + }, + }); + }; + + const handleRemoveFromFavorites = (e: React.MouseEvent) => { + e.preventDefault(); + if (!workspaceSlug || !projectId) return; + + const removeFromFavoritePromise = removeCycleFromFavorites( + workspaceSlug?.toString(), + projectId.toString(), + cycleId + ).then(() => { + captureEvent(CYCLE_UNFAVORITED, { + cycle_id: cycleId, + element: "List layout", + state: "SUCCESS", + }); + }); + + setPromiseToast(removeFromFavoritePromise, { + loading: "Removing cycle from favorites...", + success: { + title: "Success!", + message: () => "Cycle removed from favorites.", + }, + error: { + title: "Error!", + message: () => "Couldn't remove the cycle from favorites. Please try again.", + }, + }); + }; + + if (!cycle) return null; + + return ( + +
{cycle.name}
+
+ {cycle.start_date && cycle.end_date && ( +
+ {renderFormattedDate(cycle.start_date)} - {renderFormattedDate(cycle.end_date)} +
+ )} + {cycle.assignee_ids?.length > 0 ? ( + + {cycle.assignee_ids?.map((assigneeId) => { + const member = getUserDetails(assigneeId); + return ; + })} + + ) : ( + + + + )} + + {cycle.is_favorite ? ( + + ) : ( + + )} + + {workspaceSlug && projectId && ( + + )} +
+ + ); +}); diff --git a/web/components/cycles/active-cycle/upcoming-cycles-list.tsx b/web/components/cycles/active-cycle/upcoming-cycles-list.tsx new file mode 100644 index 00000000000..221ffab0b6f --- /dev/null +++ b/web/components/cycles/active-cycle/upcoming-cycles-list.tsx @@ -0,0 +1,64 @@ +import { FC } from "react"; +import { observer } from "mobx-react"; +import Image from "next/image"; +import { useTheme } from "next-themes"; +// components +import { UpcomingCycleListItem } from "@/components/cycles"; +// hooks +import { useCycle } from "@/hooks/store"; + +type Props = { + handleEmptyStateAction: () => void; +}; + +export const UpcomingCyclesList: FC = observer((props) => { + const { handleEmptyStateAction } = props; + // store hooks + const { currentProjectUpcomingCycleIds } = useCycle(); + + // theme + const { resolvedTheme } = useTheme(); + + const resolvedEmptyStatePath = `/empty-state/active-cycle/cycle-${resolvedTheme === "light" ? "light" : "dark"}.webp`; + + if (!currentProjectUpcomingCycleIds) return null; + + return ( +
+
+ Next cycles +
+ {currentProjectUpcomingCycleIds.length > 0 ? ( +
+ {currentProjectUpcomingCycleIds.map((cycleId) => ( + + ))} +
+ ) : ( +
+
+
+ button image +
+
No upcoming cycles
+

+ Create new cycles to find them here or check +
+ {"'"}All{"'"} cycles tab to see all cycles or{" "} + +

+
+
+ )} +
+ ); +}); diff --git a/web/components/cycles/applied-filters/date.tsx b/web/components/cycles/applied-filters/date.tsx new file mode 100644 index 00000000000..fa6f94c5061 --- /dev/null +++ b/web/components/cycles/applied-filters/date.tsx @@ -0,0 +1,55 @@ +import { observer } from "mobx-react-lite"; +import { X } from "lucide-react"; +// helpers +import { DATE_AFTER_FILTER_OPTIONS } from "@/constants/filters"; +import { renderFormattedDate } from "@/helpers/date-time.helper"; +import { capitalizeFirstLetter } from "@/helpers/string.helper"; +// constants + +type Props = { + editable: boolean | undefined; + handleRemove: (val: string) => void; + values: string[]; +}; + +export const AppliedDateFilters: React.FC = observer((props) => { + const { editable, handleRemove, values } = props; + + const getDateLabel = (value: string): string => { + let dateLabel = ""; + + const dateDetails = DATE_AFTER_FILTER_OPTIONS.find((d) => d.value === value); + + if (dateDetails) dateLabel = dateDetails.name; + else { + const dateParts = value.split(";"); + + if (dateParts.length === 2) { + const [date, time] = dateParts; + + dateLabel = `${capitalizeFirstLetter(time)} ${renderFormattedDate(date)}`; + } + } + + return dateLabel; + }; + + return ( + <> + {values.map((date) => ( +
+ {getDateLabel(date)} + {editable && ( + + )} +
+ ))} + + ); +}); diff --git a/web/components/cycles/applied-filters/index.ts b/web/components/cycles/applied-filters/index.ts new file mode 100644 index 00000000000..cee9ae3497d --- /dev/null +++ b/web/components/cycles/applied-filters/index.ts @@ -0,0 +1,3 @@ +export * from "./date"; +export * from "./root"; +export * from "./status"; diff --git a/web/components/cycles/applied-filters/root.tsx b/web/components/cycles/applied-filters/root.tsx new file mode 100644 index 00000000000..270beadfd4d --- /dev/null +++ b/web/components/cycles/applied-filters/root.tsx @@ -0,0 +1,90 @@ +import { observer } from "mobx-react-lite"; +import { X } from "lucide-react"; +import { TCycleFilters } from "@plane/types"; +// hooks +import { AppliedDateFilters, AppliedStatusFilters } from "@/components/cycles"; +import { EUserProjectRoles } from "@/constants/project"; +import { replaceUnderscoreIfSnakeCase } from "@/helpers/string.helper"; +import { useUser } from "@/hooks/store"; +// components +// helpers +// types +// constants + +type Props = { + appliedFilters: TCycleFilters; + handleClearAllFilters: () => void; + handleRemoveFilter: (key: keyof TCycleFilters, value: string | null) => void; + alwaysAllowEditing?: boolean; +}; + +const DATE_FILTERS = ["start_date", "end_date"]; + +export const CycleAppliedFiltersList: React.FC = observer((props) => { + const { appliedFilters, handleClearAllFilters, handleRemoveFilter, alwaysAllowEditing } = props; + // store hooks + const { + membership: { currentProjectRole }, + } = useUser(); + + if (!appliedFilters) return null; + + if (Object.keys(appliedFilters).length === 0) return null; + + const isEditingAllowed = alwaysAllowEditing || (currentProjectRole && currentProjectRole >= EUserProjectRoles.MEMBER); + + return ( +
+ {Object.entries(appliedFilters).map(([key, value]) => { + const filterKey = key as keyof TCycleFilters; + + if (!value) return; + if (Array.isArray(value) && value.length === 0) return; + + return ( +
+ {replaceUnderscoreIfSnakeCase(filterKey)} +
+ {filterKey === "status" && ( + handleRemoveFilter("status", val)} + values={value} + /> + )} + {DATE_FILTERS.includes(filterKey) && ( + handleRemoveFilter(filterKey, val)} + values={value} + /> + )} + {isEditingAllowed && ( + + )} +
+
+ ); + })} + {isEditingAllowed && ( + + )} +
+ ); +}); diff --git a/web/components/cycles/applied-filters/status.tsx b/web/components/cycles/applied-filters/status.tsx new file mode 100644 index 00000000000..83349948c93 --- /dev/null +++ b/web/components/cycles/applied-filters/status.tsx @@ -0,0 +1,43 @@ +import { observer } from "mobx-react-lite"; +import { X } from "lucide-react"; +import { CYCLE_STATUS } from "@/constants/cycle"; +import { cn } from "@/helpers/common.helper"; + +type Props = { + handleRemove: (val: string) => void; + values: string[]; + editable: boolean | undefined; +}; + +export const AppliedStatusFilters: React.FC = observer((props) => { + const { handleRemove, values, editable } = props; + + return ( + <> + {values.map((status) => { + const statusDetails = CYCLE_STATUS.find((s) => s.value === status); + return ( +
+ {statusDetails?.title} + {editable && ( + + )} +
+ ); + })} + + ); +}); diff --git a/web/components/cycles/archived-cycles/header.tsx b/web/components/cycles/archived-cycles/header.tsx new file mode 100644 index 00000000000..267c873885f --- /dev/null +++ b/web/components/cycles/archived-cycles/header.tsx @@ -0,0 +1,123 @@ +import { FC, useCallback, useRef, useState } from "react"; +import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; +// icons +import { ListFilter, Search, X } from "lucide-react"; +// types +import type { TCycleFilters } from "@plane/types"; +// components +import { ArchiveTabsList } from "@/components/archives"; +import { CycleFiltersSelection } from "@/components/cycles"; +import { FiltersDropdown } from "@/components/issues"; +// helpers +import { cn } from "@/helpers/common.helper"; +// hooks +import { useCycleFilter } from "@/hooks/store"; +import useOutsideClickDetector from "@/hooks/use-outside-click-detector"; + +export const ArchivedCyclesHeader: FC = observer(() => { + // router + const router = useRouter(); + const { projectId } = router.query; + // refs + const inputRef = useRef(null); + // hooks + const { currentProjectArchivedFilters, archivedCyclesSearchQuery, updateFilters, updateArchivedCyclesSearchQuery } = + useCycleFilter(); + // states + const [isSearchOpen, setIsSearchOpen] = useState(archivedCyclesSearchQuery !== "" ? true : false); + // outside click detector hook + useOutsideClickDetector(inputRef, () => { + if (isSearchOpen && archivedCyclesSearchQuery.trim() === "") setIsSearchOpen(false); + }); + + const handleFilters = useCallback( + (key: keyof TCycleFilters, value: string | string[]) => { + if (!projectId) return; + + const newValues = currentProjectArchivedFilters?.[key] ?? 
[]; + + if (Array.isArray(value)) + value.forEach((val) => { + if (!newValues.includes(val)) newValues.push(val); + }); + else { + if (currentProjectArchivedFilters?.[key]?.includes(value)) newValues.splice(newValues.indexOf(value), 1); + else newValues.push(value); + } + + updateFilters(projectId.toString(), { [key]: newValues }, "archived"); + }, + [currentProjectArchivedFilters, projectId, updateFilters] + ); + + const handleInputKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Escape") { + if (archivedCyclesSearchQuery && archivedCyclesSearchQuery.trim() !== "") updateArchivedCyclesSearchQuery(""); + else { + setIsSearchOpen(false); + inputRef.current?.blur(); + } + } + }; + + return ( +
+
+ +
+ {/* filter options */} +
+ {!isSearchOpen && ( + + )} +
+ + updateArchivedCyclesSearchQuery(e.target.value)} + onKeyDown={handleInputKeyDown} + /> + {isSearchOpen && ( + + )} +
+ } title="Filters" placement="bottom-end"> + + +
+
+ ); +}); diff --git a/web/components/cycles/archived-cycles/index.ts b/web/components/cycles/archived-cycles/index.ts new file mode 100644 index 00000000000..f59f0954ef1 --- /dev/null +++ b/web/components/cycles/archived-cycles/index.ts @@ -0,0 +1,4 @@ +export * from "./root"; +export * from "./view"; +export * from "./header"; +export * from "./modal"; diff --git a/web/components/cycles/archived-cycles/modal.tsx b/web/components/cycles/archived-cycles/modal.tsx new file mode 100644 index 00000000000..6e0ddef35bb --- /dev/null +++ b/web/components/cycles/archived-cycles/modal.tsx @@ -0,0 +1,104 @@ +import { useState, Fragment } from "react"; +import { useRouter } from "next/router"; +import { Dialog, Transition } from "@headlessui/react"; +// ui +import { Button, TOAST_TYPE, setToast } from "@plane/ui"; +// hooks +import { useCycle } from "@/hooks/store"; + +type Props = { + workspaceSlug: string; + projectId: string; + cycleId: string; + handleClose: () => void; + isOpen: boolean; + onSubmit?: () => Promise; +}; + +export const ArchiveCycleModal: React.FC = (props) => { + const { workspaceSlug, projectId, cycleId, isOpen, handleClose } = props; + // router + const router = useRouter(); + // states + const [isArchiving, setIsArchiving] = useState(false); + // store hooks + const { getCycleNameById, archiveCycle } = useCycle(); + + const cycleName = getCycleNameById(cycleId); + + const onClose = () => { + setIsArchiving(false); + handleClose(); + }; + + const handleArchiveCycle = async () => { + setIsArchiving(true); + await archiveCycle(workspaceSlug, projectId, cycleId) + .then(() => { + setToast({ + type: TOAST_TYPE.SUCCESS, + title: "Archive success", + message: "Your archives can be found in project archives.", + }); + onClose(); + router.push(`/${workspaceSlug}/projects/${projectId}/archives/cycles?peekCycle=${cycleId}`); + }) + .catch(() => + setToast({ + type: TOAST_TYPE.ERROR, + title: "Error!", + message: "Cycle could not be archived. 
Please try again.", + }) + ) + .finally(() => setIsArchiving(false)); + }; + + return ( + + + +
+ + +
+
+ + +
+

Archive cycle {cycleName}

+

+ Are you sure you want to archive the cycle? All your archives can be restored later. +

+
+ + +
+
+
+
+
+
+
+
+ ); +}; diff --git a/web/components/cycles/archived-cycles/root.tsx b/web/components/cycles/archived-cycles/root.tsx new file mode 100644 index 00000000000..4d47c8f34e5 --- /dev/null +++ b/web/components/cycles/archived-cycles/root.tsx @@ -0,0 +1,77 @@ +import React from "react"; +import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; +import useSWR from "swr"; +// types +import { TCycleFilters } from "@plane/types"; +// components +import { ArchivedCyclesView, CycleAppliedFiltersList } from "@/components/cycles"; +import { EmptyState } from "@/components/empty-state"; +import { CycleModuleListLayout } from "@/components/ui"; +// constants +import { EmptyStateType } from "@/constants/empty-state"; +// helpers +import { calculateTotalFilters } from "@/helpers/filter.helper"; +// hooks +import { useCycle, useCycleFilter } from "@/hooks/store"; + +export const ArchivedCycleLayoutRoot: React.FC = observer(() => { + // router + const router = useRouter(); + const { workspaceSlug, projectId } = router.query; + // hooks + const { fetchArchivedCycles, currentProjectArchivedCycleIds, loader } = useCycle(); + // cycle filters hook + const { clearAllFilters, currentProjectArchivedFilters, updateFilters } = useCycleFilter(); + // derived values + const totalArchivedCycles = currentProjectArchivedCycleIds?.length ?? 0; + + useSWR( + workspaceSlug && projectId ? `ARCHIVED_CYCLES_${workspaceSlug.toString()}_${projectId.toString()}` : null, + async () => { + if (workspaceSlug && projectId) { + await fetchArchivedCycles(workspaceSlug.toString(), projectId.toString()); + } + }, + { revalidateIfStale: false, revalidateOnFocus: false } + ); + + const handleRemoveFilter = (key: keyof TCycleFilters, value: string | null) => { + if (!projectId) return; + let newValues = currentProjectArchivedFilters?.[key] ?? 
[]; + + if (!value) newValues = []; + else newValues = newValues.filter((val) => val !== value); + + updateFilters(projectId.toString(), { [key]: newValues }, "archived"); + }; + + if (!workspaceSlug || !projectId) return <>; + + if (loader || !currentProjectArchivedCycleIds) { + return ; + } + + return ( + <> + {calculateTotalFilters(currentProjectArchivedFilters ?? {}) !== 0 && ( +
+ clearAllFilters(projectId.toString(), "archived")} + handleRemoveFilter={handleRemoveFilter} + /> +
+ )} + {totalArchivedCycles === 0 ? ( +
+ +
+ ) : ( +
+ +
+ )} + + ); +}); diff --git a/web/components/cycles/archived-cycles/view.tsx b/web/components/cycles/archived-cycles/view.tsx new file mode 100644 index 00000000000..ed86a56b44b --- /dev/null +++ b/web/components/cycles/archived-cycles/view.tsx @@ -0,0 +1,57 @@ +import { FC } from "react"; +import { observer } from "mobx-react-lite"; +import Image from "next/image"; +// components +import { CyclesList } from "@/components/cycles"; +// ui +import { CycleModuleListLayout } from "@/components/ui"; +// hooks +import { useCycle, useCycleFilter } from "@/hooks/store"; +// assets +import AllFiltersImage from "@/public/empty-state/cycle/all-filters.svg"; +import NameFilterImage from "@/public/empty-state/cycle/name-filter.svg"; + +export interface IArchivedCyclesView { + workspaceSlug: string; + projectId: string; +} + +export const ArchivedCyclesView: FC = observer((props) => { + const { workspaceSlug, projectId } = props; + // store hooks + const { getFilteredArchivedCycleIds, loader } = useCycle(); + const { archivedCyclesSearchQuery } = useCycleFilter(); + // derived values + const filteredArchivedCycleIds = getFilteredArchivedCycleIds(projectId); + + if (loader || !filteredArchivedCycleIds) return ; + + if (filteredArchivedCycleIds.length === 0) + return ( +
+
+ No matching cycles +
No matching cycles
+

+ {archivedCyclesSearchQuery.trim() === "" + ? "Remove the filters to see all cycles" + : "Remove the search criteria to see all cycles"} +

+
+
+ ); + + return ( + + ); +}); diff --git a/web/components/cycles/cycles-board-card.tsx b/web/components/cycles/board/cycles-board-card.tsx similarity index 60% rename from web/components/cycles/cycles-board-card.tsx rename to web/components/cycles/board/cycles-board-card.tsx index 7d6b1e00036..34d395db46b 100644 --- a/web/components/cycles/cycles-board-card.tsx +++ b/web/components/cycles/board/cycles-board-card.tsx @@ -1,25 +1,26 @@ -import { FC, MouseEvent, useState } from "react"; -import { useRouter } from "next/router"; -import Link from "next/link"; +import { FC, MouseEvent } from "react"; import { observer } from "mobx-react"; -// hooks -import { useEventTracker, useCycle, useUser, useMember } from "hooks/store"; -import useToast from "hooks/use-toast"; +import Link from "next/link"; +import { useRouter } from "next/router"; // components -import { CycleCreateUpdateModal, CycleDeleteModal } from "components/cycles"; +import { Info, Star } from "lucide-react"; +import type { TCycleGroups } from "@plane/types"; +import { Avatar, AvatarGroup, Tooltip, LayersIcon, CycleGroupIcon, setPromiseToast } from "@plane/ui"; +import { CycleQuickActions } from "@/components/cycles"; +// hooks // ui -import { Avatar, AvatarGroup, CustomMenu, Tooltip, LayersIcon, CycleGroupIcon } from "@plane/ui"; // icons -import { Info, LinkIcon, Pencil, Star, Trash2 } from "lucide-react"; // helpers -import { findHowManyDaysLeft, renderFormattedDate } from "helpers/date-time.helper"; -import { copyTextToClipboard } from "helpers/string.helper"; +// import { copyTextToClipboard } from "@/helpers/string.helper"; +// constants +import { CYCLE_STATUS } from "@/constants/cycle"; +import { CYCLE_FAVORITED, CYCLE_UNFAVORITED } from "@/constants/event-tracker"; +import { EUserWorkspaceRoles } from "@/constants/workspace"; +import { findHowManyDaysLeft, getDate, renderFormattedDate } from "@/helpers/date-time.helper"; // constants -import { CYCLE_STATUS } from "constants/cycle"; -import { 
EUserWorkspaceRoles } from "constants/workspace"; -import { CYCLE_FAVORITED, CYCLE_UNFAVORITED } from "constants/event-tracker"; +import { useEventTracker, useCycle, useUser, useMember } from "@/hooks/store"; +import { usePlatformOS } from "@/hooks/use-platform-os"; //.types -import { TCycleGroups } from "@plane/types"; export interface ICyclesBoardCard { workspaceSlug: string; @@ -29,29 +30,26 @@ export interface ICyclesBoardCard { export const CyclesBoardCard: FC = observer((props) => { const { cycleId, workspaceSlug, projectId } = props; - // states - const [updateModal, setUpdateModal] = useState(false); - const [deleteModal, setDeleteModal] = useState(false); // router const router = useRouter(); // store - const { setTrackElement, captureEvent } = useEventTracker(); + const { captureEvent } = useEventTracker(); const { membership: { currentProjectRole }, } = useUser(); const { addCycleToFavorites, removeCycleFromFavorites, getCycleById } = useCycle(); const { getUserDetails } = useMember(); - // toast alert - const { setToastAlert } = useToast(); // computed const cycleDetails = getCycleById(cycleId); + // hooks + const { isMobile } = usePlatformOS(); if (!cycleDetails) return null; const cycleStatus = cycleDetails.status.toLocaleLowerCase(); - const isCompleted = cycleStatus === "completed"; - const endDate = new Date(cycleDetails.end_date ?? ""); - const startDate = new Date(cycleDetails.start_date ?? 
""); + // const isCompleted = cycleStatus === "completed"; + const endDate = getDate(cycleDetails.end_date); + const startDate = getDate(cycleDetails.start_date); const isDateValid = cycleDetails.start_date || cycleDetails.end_date; const isEditingAllowed = !!currentProjectRole && currentProjectRole >= EUserWorkspaceRoles.MEMBER; @@ -75,74 +73,60 @@ export const CyclesBoardCard: FC = observer((props) => { : `${cycleDetails.completed_issues}/${cycleTotalIssues} Issues` : "0 Issue"; - const handleCopyText = (e: MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); - const originURL = typeof window !== "undefined" && window.location.origin ? window.location.origin : ""; - - copyTextToClipboard(`${originURL}/${workspaceSlug}/projects/${projectId}/cycles/${cycleId}`).then(() => { - setToastAlert({ - type: "success", - title: "Link Copied!", - message: "Cycle link copied to clipboard.", - }); - }); - }; - const handleAddToFavorites = (e: MouseEvent) => { e.preventDefault(); if (!workspaceSlug || !projectId) return; - addCycleToFavorites(workspaceSlug?.toString(), projectId.toString(), cycleId) - .then(() => { + const addToFavoritePromise = addCycleToFavorites(workspaceSlug?.toString(), projectId.toString(), cycleId).then( + () => { captureEvent(CYCLE_FAVORITED, { cycle_id: cycleId, element: "Grid layout", state: "SUCCESS", }); - }) - .catch(() => { - setToastAlert({ - type: "error", - title: "Error!", - message: "Couldn't add the cycle to favorites. Please try again.", - }); - }); + } + ); + + setPromiseToast(addToFavoritePromise, { + loading: "Adding cycle to favorites...", + success: { + title: "Success!", + message: () => "Cycle added to favorites.", + }, + error: { + title: "Error!", + message: () => "Couldn't add the cycle to favorites. 
Please try again.", + }, + }); }; const handleRemoveFromFavorites = (e: MouseEvent) => { e.preventDefault(); if (!workspaceSlug || !projectId) return; - removeCycleFromFavorites(workspaceSlug?.toString(), projectId.toString(), cycleId) - .then(() => { - captureEvent(CYCLE_UNFAVORITED, { - cycle_id: cycleId, - element: "Grid layout", - state: "SUCCESS", - }); - }) - .catch(() => { - setToastAlert({ - type: "error", - title: "Error!", - message: "Couldn't add the cycle to favorites. Please try again.", - }); + const removeFromFavoritePromise = removeCycleFromFavorites( + workspaceSlug?.toString(), + projectId.toString(), + cycleId + ).then(() => { + captureEvent(CYCLE_UNFAVORITED, { + cycle_id: cycleId, + element: "Grid layout", + state: "SUCCESS", }); - }; - - const handleEditCycle = (e: MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); - setTrackElement("Cycles page grid layout"); - setUpdateModal(true); - }; + }); - const handleDeleteCycle = (e: MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); - setTrackElement("Cycles page grid layout"); - setDeleteModal(true); + setPromiseToast(removeFromFavoritePromise, { + loading: "Removing cycle from favorites...", + success: { + title: "Success!", + message: () => "Cycle removed from favorites.", + }, + error: { + title: "Error!", + message: () => "Couldn't remove the cycle from favorites. Please try again.", + }, + }); }; const openCycleOverview = (e: MouseEvent) => { @@ -150,32 +134,24 @@ export const CyclesBoardCard: FC = observer((props) => { e.preventDefault(); e.stopPropagation(); - router.push({ - pathname: router.pathname, - query: { ...query, peekCycle: cycleId }, - }); + if (query.peekCycle) { + delete query.peekCycle; + router.push({ + pathname: router.pathname, + query: { ...query }, + }); + } else { + router.push({ + pathname: router.pathname, + query: { ...query, peekCycle: cycleId }, + }); + } }; const daysLeft = findHowManyDaysLeft(cycleDetails.end_date) ?? 0; return (
- setUpdateModal(false)} - workspaceSlug={workspaceSlug} - projectId={projectId} - /> - - setDeleteModal(false)} - workspaceSlug={workspaceSlug} - projectId={projectId} - /> -
@@ -183,7 +159,7 @@ export const CyclesBoardCard: FC = observer((props) => { - + {cycleDetails.name}
@@ -214,7 +190,7 @@ export const CyclesBoardCard: FC = observer((props) => { {issueCount}
{cycleDetails.assignee_ids.length > 0 && ( - +
{cycleDetails.assignee_ids.map((assigne_id) => { @@ -228,6 +204,7 @@ export const CyclesBoardCard: FC = observer((props) => {
@@ -267,30 +244,8 @@ export const CyclesBoardCard: FC = observer((props) => { ))} - - {!isCompleted && isEditingAllowed && ( - <> - - - - Edit cycle - - - - - - Delete cycle - - - - )} - - - - Copy cycle link - - - + +
diff --git a/web/components/cycles/board/cycles-board-map.tsx b/web/components/cycles/board/cycles-board-map.tsx new file mode 100644 index 00000000000..3e83ca755d7 --- /dev/null +++ b/web/components/cycles/board/cycles-board-map.tsx @@ -0,0 +1,25 @@ +// components +import { CyclesBoardCard } from "@/components/cycles"; + +type Props = { + cycleIds: string[]; + peekCycle: string | undefined; + projectId: string; + workspaceSlug: string; +}; + +export const CyclesBoardMap: React.FC = (props) => { + const { cycleIds, peekCycle, projectId, workspaceSlug } = props; + + return ( +
+ {cycleIds.map((cycleId) => ( + + ))} +
+ ); +}; diff --git a/web/components/cycles/board/index.ts b/web/components/cycles/board/index.ts new file mode 100644 index 00000000000..2e6933d99d5 --- /dev/null +++ b/web/components/cycles/board/index.ts @@ -0,0 +1,3 @@ +export * from "./cycles-board-card"; +export * from "./cycles-board-map"; +export * from "./root"; diff --git a/web/components/cycles/board/root.tsx b/web/components/cycles/board/root.tsx new file mode 100644 index 00000000000..7796f3efd19 --- /dev/null +++ b/web/components/cycles/board/root.tsx @@ -0,0 +1,62 @@ +import { FC } from "react"; +import { observer } from "mobx-react-lite"; +import { ChevronRight } from "lucide-react"; +import { Disclosure } from "@headlessui/react"; +// components +import { CyclePeekOverview, CyclesBoardMap } from "@/components/cycles"; +// helpers +import { cn } from "@/helpers/common.helper"; + +export interface ICyclesBoard { + completedCycleIds: string[]; + cycleIds: string[]; + workspaceSlug: string; + projectId: string; + peekCycle: string | undefined; +} + +export const CyclesBoard: FC = observer((props) => { + const { completedCycleIds, cycleIds, workspaceSlug, projectId, peekCycle } = props; + + return ( +
+
+
+ {cycleIds.length > 0 && ( + + )} + {completedCycleIds.length !== 0 && ( + + + {({ open }) => ( + <> + Completed cycles ({completedCycleIds.length}) + + + )} + + + + + + )} +
+ +
+
+ ); +}); diff --git a/web/components/cycles/cycle-mobile-header.tsx b/web/components/cycles/cycle-mobile-header.tsx index 624334ec473..8c168cbaa08 100644 --- a/web/components/cycles/cycle-mobile-header.tsx +++ b/web/components/cycles/cycle-mobile-header.tsx @@ -1,16 +1,16 @@ import { useCallback, useState } from "react"; import router from "next/router"; //components -import { CustomMenu } from "@plane/ui"; // icons import { Calendar, ChevronDown, Kanban, List } from "lucide-react"; import { IIssueDisplayFilterOptions, IIssueDisplayProperties, IIssueFilterOptions, TIssueLayouts } from "@plane/types"; +import { CustomMenu } from "@plane/ui"; // hooks -import { useIssues, useCycle, useProjectState, useLabel, useMember } from "hooks/store"; // constants -import { EIssueFilterType, EIssuesStoreType, ISSUE_DISPLAY_FILTERS_BY_LAYOUT, ISSUE_LAYOUTS } from "constants/issue"; -import { ProjectAnalyticsModal } from "components/analytics"; -import { DisplayFiltersSelection, FilterSelection, FiltersDropdown } from "components/issues"; +import { ProjectAnalyticsModal } from "@/components/analytics"; +import { DisplayFiltersSelection, FilterSelection, FiltersDropdown } from "@/components/issues"; +import { EIssueFilterType, EIssuesStoreType, ISSUE_DISPLAY_FILTERS_BY_LAYOUT, ISSUE_LAYOUTS } from "@/constants/issue"; +import { useIssues, useCycle, useProjectState, useLabel, useMember } from "@/hooks/store"; export const CycleMobileHeader = () => { const [analyticsModal, setAnalyticsModal] = useState(false); @@ -21,11 +21,7 @@ export const CycleMobileHeader = () => { { key: "calendar", title: "Calendar", icon: Calendar }, ]; - const { workspaceSlug, projectId, cycleId } = router.query as { - workspaceSlug: string; - projectId: string; - cycleId: string; - }; + const { workspaceSlug, projectId, cycleId } = router.query; const cycleDetails = cycleId ? 
getCycleById(cycleId.toString()) : undefined; // store hooks const { @@ -35,8 +31,14 @@ export const CycleMobileHeader = () => { const handleLayoutChange = useCallback( (layout: TIssueLayouts) => { - if (!workspaceSlug || !projectId) return; - updateFilters(workspaceSlug, projectId, EIssueFilterType.DISPLAY_FILTERS, { layout: layout }, cycleId); + if (!workspaceSlug || !projectId || !cycleId) return; + updateFilters( + workspaceSlug.toString(), + projectId.toString(), + EIssueFilterType.DISPLAY_FILTERS, + { layout: layout }, + cycleId.toString() + ); }, [workspaceSlug, projectId, cycleId, updateFilters] ); @@ -49,7 +51,7 @@ export const CycleMobileHeader = () => { const handleFiltersUpdate = useCallback( (key: keyof IIssueFilterOptions, value: string | string[]) => { - if (!workspaceSlug || !projectId) return; + if (!workspaceSlug || !projectId || !cycleId) return; const newValues = issueFilters?.filters?.[key] ?? []; if (Array.isArray(value)) { @@ -61,23 +63,41 @@ export const CycleMobileHeader = () => { else newValues.push(value); } - updateFilters(workspaceSlug, projectId, EIssueFilterType.FILTERS, { [key]: newValues }, cycleId); + updateFilters( + workspaceSlug.toString(), + projectId.toString(), + EIssueFilterType.FILTERS, + { [key]: newValues }, + cycleId.toString() + ); }, [workspaceSlug, projectId, cycleId, issueFilters, updateFilters] ); const handleDisplayFilters = useCallback( (updatedDisplayFilter: Partial) => { - if (!workspaceSlug || !projectId) return; - updateFilters(workspaceSlug, projectId, EIssueFilterType.DISPLAY_FILTERS, updatedDisplayFilter, cycleId); + if (!workspaceSlug || !projectId || !cycleId) return; + updateFilters( + workspaceSlug.toString(), + projectId.toString(), + EIssueFilterType.DISPLAY_FILTERS, + updatedDisplayFilter, + cycleId.toString() + ); }, [workspaceSlug, projectId, cycleId, updateFilters] ); const handleDisplayProperties = useCallback( (property: Partial) => { - if (!workspaceSlug || !projectId) return; - 
updateFilters(workspaceSlug, projectId, EIssueFilterType.DISPLAY_PROPERTIES, property, cycleId); + if (!workspaceSlug || !projectId || !cycleId) return; + updateFilters( + workspaceSlug.toString(), + projectId.toString(), + EIssueFilterType.DISPLAY_PROPERTIES, + property, + cycleId.toString() + ); }, [workspaceSlug, projectId, cycleId, updateFilters] ); @@ -89,7 +109,7 @@ export const CycleMobileHeader = () => { onClose={() => setAnalyticsModal(false)} cycleDetails={cycleDetails ?? undefined} /> -
+
{ > {layouts.map((layout, index) => ( { handleLayoutChange(ISSUE_LAYOUTS[index].key); }} diff --git a/web/components/cycles/cycle-peek-overview.tsx b/web/components/cycles/cycle-peek-overview.tsx index fbfb46b50b5..8409c06fe36 100644 --- a/web/components/cycles/cycle-peek-overview.tsx +++ b/web/components/cycles/cycle-peek-overview.tsx @@ -1,17 +1,18 @@ import React, { useEffect } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; // hooks -import { useCycle } from "hooks/store"; +import { useCycle } from "@/hooks/store"; // components import { CycleDetailsSidebar } from "./sidebar"; type Props = { projectId: string; workspaceSlug: string; + isArchived?: boolean; }; -export const CyclePeekOverview: React.FC = observer(({ projectId, workspaceSlug }) => { +export const CyclePeekOverview: React.FC = observer(({ projectId, workspaceSlug, isArchived = false }) => { // router const router = useRouter(); const { peekCycle } = router.query; @@ -29,22 +30,26 @@ export const CyclePeekOverview: React.FC = observer(({ projectId, workspa }; useEffect(() => { - if (!peekCycle) return; + if (!peekCycle || isArchived) return; fetchCycleDetails(workspaceSlug, projectId, peekCycle.toString()); - }, [fetchCycleDetails, peekCycle, projectId, workspaceSlug]); + }, [fetchCycleDetails, isArchived, peekCycle, projectId, workspaceSlug]); return ( <> {peekCycle && (
- +
)} diff --git a/web/components/cycles/cycles-board.tsx b/web/components/cycles/cycles-board.tsx deleted file mode 100644 index 1a90692675d..00000000000 --- a/web/components/cycles/cycles-board.tsx +++ /dev/null @@ -1,64 +0,0 @@ -import { FC } from "react"; -import { observer } from "mobx-react-lite"; -import { useTheme } from "next-themes"; -// hooks -import { useUser } from "hooks/store"; -// components -import { CyclePeekOverview, CyclesBoardCard } from "components/cycles"; -import { EmptyState, getEmptyStateImagePath } from "components/empty-state"; -// constants -import { CYCLE_EMPTY_STATE_DETAILS } from "constants/empty-state"; - -export interface ICyclesBoard { - cycleIds: string[]; - filter: string; - workspaceSlug: string; - projectId: string; - peekCycle: string | undefined; -} - -export const CyclesBoard: FC = observer((props) => { - const { cycleIds, filter, workspaceSlug, projectId, peekCycle } = props; - // theme - const { resolvedTheme } = useTheme(); - // store hooks - const { currentUser } = useUser(); - - const emptyStateDetail = CYCLE_EMPTY_STATE_DETAILS[filter as keyof typeof CYCLE_EMPTY_STATE_DETAILS]; - - const isLightMode = resolvedTheme ? resolvedTheme === "light" : currentUser?.theme.theme === "light"; - const emptyStateImage = getEmptyStateImagePath("cycle", filter, isLightMode); - - return ( - <> - {cycleIds?.length > 0 ? ( -
-
-
- {cycleIds.map((cycleId) => ( - - ))} -
- -
-
- ) : ( - - )} - - ); -}); diff --git a/web/components/cycles/cycles-list-mobile-header.tsx b/web/components/cycles/cycles-list-mobile-header.tsx new file mode 100644 index 00000000000..590cb794f60 --- /dev/null +++ b/web/components/cycles/cycles-list-mobile-header.tsx @@ -0,0 +1,52 @@ +import { observer } from "mobx-react"; +// ui +import { List } from "lucide-react"; +import { CustomMenu } from "@plane/ui"; +// icon +// constants +import { CYCLE_VIEW_LAYOUTS } from "@/constants/cycle"; +// hooks +import { useCycleFilter, useProject } from "@/hooks/store"; + +const CyclesListMobileHeader = observer(() => { + const { currentProjectDetails } = useProject(); + // hooks + const { updateDisplayFilters } = useCycleFilter(); + return ( +
+ + + Layout + + } + customButtonClassName="flex flex-grow justify-center items-center text-custom-text-200 text-sm" + closeOnSelect + > + {CYCLE_VIEW_LAYOUTS.map((layout) => { + if (layout.key == "gantt") return; + return ( + { + updateDisplayFilters(currentProjectDetails!.id, { + layout: layout.key, + }); + }} + className="flex items-center gap-2" + > + +
{layout.title}
+
+ ); + })} +
+
+ ); +}); + +export default CyclesListMobileHeader; diff --git a/web/components/cycles/cycles-list.tsx b/web/components/cycles/cycles-list.tsx deleted file mode 100644 index 173a7f4b7cd..00000000000 --- a/web/components/cycles/cycles-list.tsx +++ /dev/null @@ -1,74 +0,0 @@ -import { FC } from "react"; -import { observer } from "mobx-react-lite"; -import { useTheme } from "next-themes"; -// hooks -import { useUser } from "hooks/store"; -// components -import { CyclePeekOverview, CyclesListItem } from "components/cycles"; -import { EmptyState, getEmptyStateImagePath } from "components/empty-state"; -// ui -import { Loader } from "@plane/ui"; -// constants -import { CYCLE_EMPTY_STATE_DETAILS } from "constants/empty-state"; - -export interface ICyclesList { - cycleIds: string[]; - filter: string; - workspaceSlug: string; - projectId: string; -} - -export const CyclesList: FC = observer((props) => { - const { cycleIds, filter, workspaceSlug, projectId } = props; - // theme - const { resolvedTheme } = useTheme(); - // store hooks - const { currentUser } = useUser(); - - const emptyStateDetail = CYCLE_EMPTY_STATE_DETAILS[filter as keyof typeof CYCLE_EMPTY_STATE_DETAILS]; - - const isLightMode = resolvedTheme ? resolvedTheme === "light" : currentUser?.theme.theme === "light"; - const emptyStateImage = getEmptyStateImagePath("cycle", filter, isLightMode); - - return ( - <> - {cycleIds ? ( - <> - {cycleIds.length > 0 ? ( -
-
-
- {cycleIds.map((cycleId) => ( - - ))} -
- -
-
- ) : ( - - )} - - ) : ( - - - - - - )} - - ); -}); diff --git a/web/components/cycles/cycles-view-header.tsx b/web/components/cycles/cycles-view-header.tsx new file mode 100644 index 00000000000..aad650dd66b --- /dev/null +++ b/web/components/cycles/cycles-view-header.tsx @@ -0,0 +1,167 @@ +import { useCallback, useRef, useState } from "react"; +import { observer } from "mobx-react"; +import { ListFilter, Search, X } from "lucide-react"; +import { Tab } from "@headlessui/react"; +// types +import { TCycleFilters } from "@plane/types"; +// ui +import { Tooltip } from "@plane/ui"; +// components +import { CycleFiltersSelection } from "@/components/cycles"; +import { FiltersDropdown } from "@/components/issues"; +// constants +import { CYCLE_TABS_LIST, CYCLE_VIEW_LAYOUTS } from "@/constants/cycle"; +// helpers +import { cn } from "@/helpers/common.helper"; +// hooks +import { useCycleFilter } from "@/hooks/store"; +import useOutsideClickDetector from "@/hooks/use-outside-click-detector"; +import { usePlatformOS } from "@/hooks/use-platform-os"; + +type Props = { + projectId: string; +}; + +export const CyclesViewHeader: React.FC = observer((props) => { + const { projectId } = props; + // refs + const inputRef = useRef(null); + // hooks + const { + currentProjectDisplayFilters, + currentProjectFilters, + searchQuery, + updateDisplayFilters, + updateFilters, + updateSearchQuery, + } = useCycleFilter(); + const { isMobile } = usePlatformOS(); + // states + const [isSearchOpen, setIsSearchOpen] = useState(searchQuery !== "" ? true : false); + // outside click detector hook + useOutsideClickDetector(inputRef, () => { + if (isSearchOpen && searchQuery.trim() === "") setIsSearchOpen(false); + }); + // derived values + const activeLayout = currentProjectDisplayFilters?.layout ?? "list"; + + const handleFilters = useCallback( + (key: keyof TCycleFilters, value: string | string[]) => { + const newValues = currentProjectFilters?.[key] ?? 
[]; + + if (Array.isArray(value)) + value.forEach((val) => { + if (!newValues.includes(val)) newValues.push(val); + }); + else { + if (currentProjectFilters?.[key]?.includes(value)) newValues.splice(newValues.indexOf(value), 1); + else newValues.push(value); + } + + updateFilters(projectId, { [key]: newValues }); + }, + [currentProjectFilters, projectId, updateFilters] + ); + + const handleInputKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Escape") { + if (searchQuery && searchQuery.trim() !== "") updateSearchQuery(""); + else { + setIsSearchOpen(false); + inputRef.current?.blur(); + } + } + }; + + return ( +
+ + {CYCLE_TABS_LIST.map((tab) => ( + + `border-b-2 p-4 text-sm font-medium outline-none ${ + selected ? "border-custom-primary-100 text-custom-primary-100" : "border-transparent" + }` + } + > + {tab.name} + + ))} + + {currentProjectDisplayFilters?.active_tab !== "active" && ( +
+ {!isSearchOpen && ( + + )} +
+ + updateSearchQuery(e.target.value)} + onKeyDown={handleInputKeyDown} + /> + {isSearchOpen && ( + + )} +
+ } title="Filters" placement="bottom-end"> + + +
+ {CYCLE_VIEW_LAYOUTS.map((layout) => ( + + + + ))} +
+
+ )} +
+ ); +}); diff --git a/web/components/cycles/cycles-view.tsx b/web/components/cycles/cycles-view.tsx index a321be0b592..ddb45b5e52c 100644 --- a/web/components/cycles/cycles-view.tsx +++ b/web/components/cycles/cycles-view.tsx @@ -1,43 +1,35 @@ import { FC } from "react"; import { observer } from "mobx-react-lite"; +import Image from "next/image"; +import { TCycleLayoutOptions } from "@plane/types"; // hooks -import { useCycle } from "hooks/store"; // components -import { CyclesBoard, CyclesList, CyclesListGanttChartView } from "components/cycles"; -// ui components -import { CycleModuleBoardLayout, CycleModuleListLayout, GanttLayoutLoader } from "components/ui"; +import { CyclesBoard, CyclesList, CyclesListGanttChartView } from "@/components/cycles"; +// ui +import { CycleModuleBoardLayout, CycleModuleListLayout, GanttLayoutLoader } from "@/components/ui"; +import { useCycle, useCycleFilter } from "@/hooks/store"; +// assets +import AllFiltersImage from "public/empty-state/cycle/all-filters.svg"; +import NameFilterImage from "public/empty-state/cycle/name-filter.svg"; // types -import { TCycleLayout, TCycleView } from "@plane/types"; export interface ICyclesView { - filter: TCycleView; - layout: TCycleLayout; + layout: TCycleLayoutOptions; workspaceSlug: string; projectId: string; peekCycle: string | undefined; } export const CyclesView: FC = observer((props) => { - const { filter, layout, workspaceSlug, projectId, peekCycle } = props; + const { layout, workspaceSlug, projectId, peekCycle } = props; // store hooks - const { - currentProjectCompletedCycleIds, - currentProjectDraftCycleIds, - currentProjectUpcomingCycleIds, - currentProjectCycleIds, - loader, - } = useCycle(); + const { getFilteredCycleIds, getFilteredCompletedCycleIds, loader } = useCycle(); + const { searchQuery } = useCycleFilter(); + // derived values + const filteredCycleIds = getFilteredCycleIds(projectId); + const filteredCompletedCycleIds = getFilteredCompletedCycleIds(projectId); - const 
cyclesList = - filter === "completed" - ? currentProjectCompletedCycleIds - : filter === "draft" - ? currentProjectDraftCycleIds - : filter === "upcoming" - ? currentProjectUpcomingCycleIds - : currentProjectCycleIds; - - if (loader || !cyclesList) + if (loader || !filteredCycleIds) return ( <> {layout === "list" && } @@ -46,23 +38,45 @@ export const CyclesView: FC = observer((props) => { ); + if (filteredCycleIds.length === 0 && filteredCompletedCycleIds?.length === 0) + return ( +
+
+ No matching cycles +
No matching cycles
+

+ {searchQuery.trim() === "" + ? "Remove the filters to see all cycles" + : "Remove the search criteria to see all cycles"} +

+
+
+ ); + return ( <> {layout === "list" && ( - + )} - {layout === "board" && ( )} - - {layout === "gantt" && } + {layout === "gantt" && } ); }); diff --git a/web/components/cycles/delete-modal.tsx b/web/components/cycles/delete-modal.tsx index 5dc0306ab45..5b9eb53cdce 100644 --- a/web/components/cycles/delete-modal.tsx +++ b/web/components/cycles/delete-modal.tsx @@ -1,17 +1,16 @@ import { Fragment, useState } from "react"; -import { useRouter } from "next/router"; -import { Dialog, Transition } from "@headlessui/react"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { AlertTriangle } from "lucide-react"; +import { Dialog, Transition } from "@headlessui/react"; +import { ICycle } from "@plane/types"; // hooks -import { useEventTracker, useCycle } from "hooks/store"; -import useToast from "hooks/use-toast"; +import { Button, TOAST_TYPE, setToast } from "@plane/ui"; +import { CYCLE_DELETED } from "@/constants/event-tracker"; +import { useEventTracker, useCycle } from "@/hooks/store"; // components -import { Button } from "@plane/ui"; // types -import { ICycle } from "@plane/types"; // constants -import { CYCLE_DELETED } from "constants/event-tracker"; interface ICycleDelete { cycle: ICycle; @@ -31,8 +30,6 @@ export const CycleDeleteModal: React.FC = observer((props) => { // store hooks const { captureCycleEvent } = useEventTracker(); const { deleteCycle } = useCycle(); - // toast alert - const { setToastAlert } = useToast(); const formSubmit = async () => { if (!cycle) return; @@ -41,8 +38,8 @@ export const CycleDeleteModal: React.FC = observer((props) => { try { await deleteCycle(workspaceSlug, projectId, cycle.id) .then(() => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Cycle deleted successfully.", }); @@ -62,8 +59,8 @@ export const CycleDeleteModal: React.FC = observer((props) => { handleClose(); } catch (error) { - setToastAlert({ - type: "error", + 
setToast({ + type: TOAST_TYPE.ERROR, title: "Warning!", message: "Something went wrong please try again later.", }); @@ -106,7 +103,7 @@ export const CycleDeleteModal: React.FC = observer((props) => {
-
Delete Cycle
+
Delete cycle

@@ -121,8 +118,8 @@ export const CycleDeleteModal: React.FC = observer((props) => { Cancel -

diff --git a/web/components/cycles/dropdowns/filters/end-date.tsx b/web/components/cycles/dropdowns/filters/end-date.tsx new file mode 100644 index 00000000000..69907118731 --- /dev/null +++ b/web/components/cycles/dropdowns/filters/end-date.tsx @@ -0,0 +1,65 @@ +import React, { useState } from "react"; +import { observer } from "mobx-react-lite"; + +// components +import { DateFilterModal } from "@/components/core"; +import { FilterHeader, FilterOption } from "@/components/issues"; +// constants +import { DATE_AFTER_FILTER_OPTIONS } from "@/constants/filters"; + +type Props = { + appliedFilters: string[] | null; + handleUpdate: (val: string | string[]) => void; + searchQuery: string; +}; + +export const FilterEndDate: React.FC = observer((props) => { + const { appliedFilters, handleUpdate, searchQuery } = props; + + const [previewEnabled, setPreviewEnabled] = useState(true); + const [isDateFilterModalOpen, setIsDateFilterModalOpen] = useState(false); + + const appliedFiltersCount = appliedFilters?.length ?? 0; + + const filteredOptions = DATE_AFTER_FILTER_OPTIONS.filter((d) => + d.name.toLowerCase().includes(searchQuery.toLowerCase()) + ); + + return ( + <> + {isDateFilterModalOpen && ( + setIsDateFilterModalOpen(false)} + isOpen={isDateFilterModalOpen} + onSelect={(val) => handleUpdate(val)} + title="Due date" + /> + )} + 0 ? ` (${appliedFiltersCount})` : ""}`} + isPreviewEnabled={previewEnabled} + handleIsPreviewEnabled={() => setPreviewEnabled(!previewEnabled)} + /> + {previewEnabled && ( +
+ {filteredOptions.length > 0 ? ( + <> + {filteredOptions.map((option) => ( + handleUpdate(option.value)} + title={option.name} + multiple + /> + ))} + setIsDateFilterModalOpen(true)} title="Custom" multiple /> + + ) : ( +

No matches found

+ )} +
+ )} + + ); +}); diff --git a/web/components/cycles/dropdowns/filters/index.ts b/web/components/cycles/dropdowns/filters/index.ts new file mode 100644 index 00000000000..3d097b6f06a --- /dev/null +++ b/web/components/cycles/dropdowns/filters/index.ts @@ -0,0 +1,4 @@ +export * from "./end-date"; +export * from "./root"; +export * from "./start-date"; +export * from "./status"; diff --git a/web/components/cycles/dropdowns/filters/root.tsx b/web/components/cycles/dropdowns/filters/root.tsx new file mode 100644 index 00000000000..57e9ec90c4e --- /dev/null +++ b/web/components/cycles/dropdowns/filters/root.tsx @@ -0,0 +1,72 @@ +import { useState } from "react"; +import { observer } from "mobx-react-lite"; +import { Search, X } from "lucide-react"; +import { TCycleFilters, TCycleGroups } from "@plane/types"; +// components +import { FilterEndDate, FilterStartDate, FilterStatus } from "@/components/cycles"; +// types + +type Props = { + filters: TCycleFilters; + handleFiltersUpdate: (key: keyof TCycleFilters, value: string | string[]) => void; + isArchived?: boolean; +}; + +export const CycleFiltersSelection: React.FC = observer((props) => { + const { filters, handleFiltersUpdate, isArchived = false } = props; + // states + const [filtersSearchQuery, setFiltersSearchQuery] = useState(""); + + return ( +
+
+
+ + setFiltersSearchQuery(e.target.value)} + autoFocus + /> + {filtersSearchQuery !== "" && ( + + )} +
+
+
+ {/* cycle status */} + {!isArchived && ( +
+ handleFiltersUpdate("status", val)} + searchQuery={filtersSearchQuery} + /> +
+ )} + + {/* start date */} +
+ handleFiltersUpdate("start_date", val)} + searchQuery={filtersSearchQuery} + /> +
+ + {/* end date */} +
+ handleFiltersUpdate("end_date", val)} + searchQuery={filtersSearchQuery} + /> +
+
+
+ ); +}); diff --git a/web/components/cycles/dropdowns/filters/start-date.tsx b/web/components/cycles/dropdowns/filters/start-date.tsx new file mode 100644 index 00000000000..2b55ada35aa --- /dev/null +++ b/web/components/cycles/dropdowns/filters/start-date.tsx @@ -0,0 +1,65 @@ +import React, { useState } from "react"; +import { observer } from "mobx-react-lite"; + +// components +import { DateFilterModal } from "@/components/core"; +import { FilterHeader, FilterOption } from "@/components/issues"; +// constants +import { DATE_AFTER_FILTER_OPTIONS } from "@/constants/filters"; + +type Props = { + appliedFilters: string[] | null; + handleUpdate: (val: string | string[]) => void; + searchQuery: string; +}; + +export const FilterStartDate: React.FC = observer((props) => { + const { appliedFilters, handleUpdate, searchQuery } = props; + + const [previewEnabled, setPreviewEnabled] = useState(true); + const [isDateFilterModalOpen, setIsDateFilterModalOpen] = useState(false); + + const appliedFiltersCount = appliedFilters?.length ?? 0; + + const filteredOptions = DATE_AFTER_FILTER_OPTIONS.filter((d) => + d.name.toLowerCase().includes(searchQuery.toLowerCase()) + ); + + return ( + <> + {isDateFilterModalOpen && ( + setIsDateFilterModalOpen(false)} + isOpen={isDateFilterModalOpen} + onSelect={(val) => handleUpdate(val)} + title="Start date" + /> + )} + 0 ? ` (${appliedFiltersCount})` : ""}`} + isPreviewEnabled={previewEnabled} + handleIsPreviewEnabled={() => setPreviewEnabled(!previewEnabled)} + /> + {previewEnabled && ( +
+ {filteredOptions.length > 0 ? ( + <> + {filteredOptions.map((option) => ( + handleUpdate(option.value)} + title={option.name} + multiple + /> + ))} + setIsDateFilterModalOpen(true)} title="Custom" multiple /> + + ) : ( +

No matches found

+ )} +
+ )} + + ); +}); diff --git a/web/components/cycles/dropdowns/filters/status.tsx b/web/components/cycles/dropdowns/filters/status.tsx new file mode 100644 index 00000000000..b1f07c0b1d5 --- /dev/null +++ b/web/components/cycles/dropdowns/filters/status.tsx @@ -0,0 +1,49 @@ +import React, { useState } from "react"; +import { observer } from "mobx-react-lite"; +import { TCycleGroups } from "@plane/types"; +// components +import { FilterHeader, FilterOption } from "@/components/issues"; +// types +import { CYCLE_STATUS } from "@/constants/cycle"; +// constants + +type Props = { + appliedFilters: TCycleGroups[] | null; + handleUpdate: (val: string) => void; + searchQuery: string; +}; + +export const FilterStatus: React.FC = observer((props) => { + const { appliedFilters, handleUpdate, searchQuery } = props; + // states + const [previewEnabled, setPreviewEnabled] = useState(true); + + const appliedFiltersCount = appliedFilters?.length ?? 0; + const filteredOptions = CYCLE_STATUS.filter((p) => p.value.includes(searchQuery.toLowerCase())); + + return ( + <> + 0 ? ` (${appliedFiltersCount})` : ""}`} + isPreviewEnabled={previewEnabled} + handleIsPreviewEnabled={() => setPreviewEnabled(!previewEnabled)} + /> + {previewEnabled && ( +
+ {filteredOptions.length > 0 ? ( + filteredOptions.map((status) => ( + handleUpdate(status.value)} + title={status.title} + /> + )) + ) : ( +

No matches found

+ )} +
+ )} + + ); +}); diff --git a/web/components/cycles/dropdowns/index.ts b/web/components/cycles/dropdowns/index.ts new file mode 100644 index 00000000000..302e3a1a6ed --- /dev/null +++ b/web/components/cycles/dropdowns/index.ts @@ -0,0 +1 @@ +export * from "./filters"; diff --git a/web/components/cycles/form.tsx b/web/components/cycles/form.tsx index 799d8043828..f8092f8d066 100644 --- a/web/components/cycles/form.tsx +++ b/web/components/cycles/form.tsx @@ -1,14 +1,15 @@ import { useEffect } from "react"; + import { Controller, useForm } from "react-hook-form"; -// components -import { DateRangeDropdown, ProjectDropdown } from "components/dropdowns"; -// ui -import { Button, Input, TextArea } from "@plane/ui"; -// helpers -import { renderFormattedPayloadDate } from "helpers/date-time.helper"; -// types import { ICycle } from "@plane/types"; +import { Button, Input, TextArea } from "@plane/ui"; + +import { DateRangeDropdown, ProjectDropdown } from "@/components/dropdowns"; + +import { getDate, renderFormattedPayloadDate } from "@/helpers/date-time.helper"; +import { shouldRenderProject } from "@/helpers/project.helper"; + type Props = { handleFormSubmit: (values: Partial, dirtyFields: any) => Promise; handleClose: () => void; @@ -66,6 +67,7 @@ export const CycleForm: React.FC = (props) => { setActiveProject(val); }} buttonVariant="background-with-text" + renderCondition={(project) => shouldRenderProject(project)} tabIndex={7} /> )} @@ -111,7 +113,7 @@ export const CycleForm: React.FC = (props) => { id="cycle_description" name="description" placeholder="Description..." - className="!h-24 w-full resize-none text-sm" + className="w-full text-sm resize-none min-h-24" hasError={Boolean(errors?.description)} value={value} onChange={onChange} @@ -135,8 +137,8 @@ export const CycleForm: React.FC = (props) => { className="h-7" minDate={new Date()} value={{ - from: startDateValue ? new Date(startDateValue) : undefined, - to: endDateValue ? 
new Date(endDateValue) : undefined, + from: getDate(startDateValue), + to: getDate(endDateValue), }} onSelect={(val) => { onChangeStartDate(val?.from ? renderFormattedPayloadDate(val.from) : null); diff --git a/web/components/cycles/gantt-chart/blocks.tsx b/web/components/cycles/gantt-chart/blocks.tsx index 5d82c94a863..acc081b4d5b 100644 --- a/web/components/cycles/gantt-chart/blocks.tsx +++ b/web/components/cycles/gantt-chart/blocks.tsx @@ -1,11 +1,13 @@ -import { useRouter } from "next/router"; import { observer } from "mobx-react"; +import Link from "next/link"; +import { useRouter } from "next/router"; // hooks -import { useApplication, useCycle } from "hooks/store"; // ui import { Tooltip, ContrastIcon } from "@plane/ui"; // helpers -import { renderFormattedDate } from "helpers/date-time.helper"; +import { renderFormattedDate } from "@/helpers/date-time.helper"; +import { useApplication, useCycle } from "@/hooks/store"; +import { usePlatformOS } from "@/hooks/use-platform-os"; type Props = { cycleId: string; @@ -22,7 +24,7 @@ export const CycleGanttBlock: React.FC = observer((props) => { const { getCycleById } = useCycle(); // derived values const cycleDetails = getCycleById(cycleId); - + const { isMobile } = usePlatformOS(); const cycleStatus = cycleDetails?.status.toLocaleLowerCase(); return ( @@ -33,17 +35,18 @@ export const CycleGanttBlock: React.FC = observer((props) => { cycleStatus === "current" ? "#09a953" : cycleStatus === "upcoming" - ? "#f7ae59" - : cycleStatus === "completed" - ? "#3f76ff" - : cycleStatus === "draft" - ? "rgb(var(--color-text-200))" - : "", + ? "#f7ae59" + : cycleStatus === "completed" + ? "#3f76ff" + : cycleStatus === "draft" + ? "rgb(var(--color-text-200))" + : "", }} onClick={() => router.push(`/${workspaceSlug}/projects/${cycleDetails?.project_id}/cycles/${cycleDetails?.id}`)} >
{cycleDetails?.name}
@@ -63,8 +66,6 @@ export const CycleGanttBlock: React.FC = observer((props) => { export const CycleGanttSidebarBlock: React.FC = observer((props) => { const { cycleId } = props; - // router - const router = useRouter(); // store hooks const { router: { workspaceSlug }, @@ -76,9 +77,9 @@ export const CycleGanttSidebarBlock: React.FC = observer((props) => { const cycleStatus = cycleDetails?.status.toLocaleLowerCase(); return ( -
router.push(`/${workspaceSlug}/projects/${cycleDetails?.project_id}/cycles/${cycleDetails?.id}`)} + href={`/${workspaceSlug}/projects/${cycleDetails?.project_id}/cycles/${cycleDetails?.id}`} > = observer((props) => { cycleStatus === "current" ? "#09a953" : cycleStatus === "upcoming" - ? "#f7ae59" - : cycleStatus === "completed" - ? "#3f76ff" - : cycleStatus === "draft" - ? "rgb(var(--color-text-200))" - : "" + ? "#f7ae59" + : cycleStatus === "completed" + ? "#3f76ff" + : cycleStatus === "draft" + ? "rgb(var(--color-text-200))" + : "" }`} />
{cycleDetails?.name}
-
+ ); }); diff --git a/web/components/cycles/gantt-chart/cycles-list-layout.tsx b/web/components/cycles/gantt-chart/cycles-list-layout.tsx index 646333aad90..6d84f73f061 100644 --- a/web/components/cycles/gantt-chart/cycles-list-layout.tsx +++ b/web/components/cycles/gantt-chart/cycles-list-layout.tsx @@ -1,15 +1,15 @@ import { FC } from "react"; -import { useRouter } from "next/router"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; +import { ICycle } from "@plane/types"; // hooks -import { useCycle, useUser } from "hooks/store"; +import { CycleGanttBlock } from "@/components/cycles"; +import { GanttChartRoot, IBlockUpdateData, CycleGanttSidebar } from "@/components/gantt-chart"; +import { getDate } from "@/helpers/date-time.helper"; +import { useCycle } from "@/hooks/store"; // components -import { GanttChartRoot, IBlockUpdateData, CycleGanttSidebar } from "components/gantt-chart"; -import { CycleGanttBlock } from "components/cycles"; // types -import { ICycle } from "@plane/types"; // constants -import { EUserProjectRoles } from "constants/project"; type Props = { workspaceSlug: string; @@ -22,9 +22,6 @@ export const CyclesListGanttChartView: FC = observer((props) => { const router = useRouter(); const { workspaceSlug } = router.query; // store hooks - const { - membership: { currentProjectRole }, - } = useUser(); const { getCycleById, updateCycleDetails } = useCycle(); const handleCycleUpdate = async (cycle: ICycle, data: IBlockUpdateData) => { @@ -45,16 +42,13 @@ export const CyclesListGanttChartView: FC = observer((props) => { data: block, id: block?.id ?? "", sort_order: block?.sort_order ?? 0, - start_date: new Date(block?.start_date ?? ""), - target_date: new Date(block?.end_date ?? 
""), + start_date: getDate(block?.start_date), + target_date: getDate(block?.end_date), })); return structuredBlocks; }; - const isAllowed = - currentProjectRole && [EUserProjectRoles.ADMIN, EUserProjectRoles.MEMBER].includes(currentProjectRole); - return (
= observer((props) => { enableBlockLeftResize={false} enableBlockRightResize={false} enableBlockMove={false} - enableReorder={isAllowed} + enableReorder={false} />
); diff --git a/web/components/cycles/index.ts b/web/components/cycles/index.ts index db5e9de9eea..b1b718175eb 100644 --- a/web/components/cycles/index.ts +++ b/web/components/cycles/index.ts @@ -1,17 +1,19 @@ -export * from "./cycles-view"; -export * from "./active-cycle-details"; -export * from "./active-cycle-stats"; +export * from "./active-cycle"; +export * from "./applied-filters"; +export * from "./board/"; +export * from "./dropdowns"; export * from "./gantt-chart"; +export * from "./list"; +export * from "./cycle-peek-overview"; +export * from "./cycles-view-header"; export * from "./cycles-view"; +export * from "./delete-modal"; export * from "./form"; export * from "./modal"; +export * from "./quick-actions"; export * from "./sidebar"; export * from "./transfer-issues-modal"; export * from "./transfer-issues"; -export * from "./cycles-list"; -export * from "./cycles-list-item"; -export * from "./cycles-board"; -export * from "./cycles-board-card"; -export * from "./delete-modal"; -export * from "./cycle-peek-overview"; -export * from "./cycles-list-item"; + +// archived cycles +export * from "./archived-cycles"; diff --git a/web/components/cycles/cycles-list-item.tsx b/web/components/cycles/list/cycles-list-item.tsx similarity index 50% rename from web/components/cycles/cycles-list-item.tsx rename to web/components/cycles/list/cycles-list-item.tsx index 31958cd847f..7ee797bb249 100644 --- a/web/components/cycles/cycles-list-item.tsx +++ b/web/components/cycles/list/cycles-list-item.tsx @@ -1,25 +1,24 @@ -import { FC, MouseEvent, useState } from "react"; +import { FC, MouseEvent } from "react"; +import { observer } from "mobx-react"; import Link from "next/link"; import { useRouter } from "next/router"; -import { observer } from "mobx-react"; -// hooks -import { useEventTracker, useCycle, useUser, useMember } from "hooks/store"; -import useToast from "hooks/use-toast"; -// components -import { CycleCreateUpdateModal, CycleDeleteModal } from 
"components/cycles"; -// ui -import { CustomMenu, Tooltip, CircularProgressIndicator, CycleGroupIcon, AvatarGroup, Avatar } from "@plane/ui"; // icons -import { Check, Info, LinkIcon, Pencil, Star, Trash2, User2 } from "lucide-react"; -// helpers -import { findHowManyDaysLeft, renderFormattedDate } from "helpers/date-time.helper"; -import { copyTextToClipboard } from "helpers/string.helper"; -// constants -import { CYCLE_STATUS } from "constants/cycle"; -import { EUserWorkspaceRoles } from "constants/workspace"; +import { Check, Info, Star, User2 } from "lucide-react"; // types -import { TCycleGroups } from "@plane/types"; -import { CYCLE_FAVORITED, CYCLE_UNFAVORITED } from "constants/event-tracker"; +import type { TCycleGroups } from "@plane/types"; +// ui +import { Tooltip, CircularProgressIndicator, CycleGroupIcon, AvatarGroup, Avatar, setPromiseToast } from "@plane/ui"; +// components +import { CycleQuickActions } from "@/components/cycles"; +// constants +import { CYCLE_STATUS } from "@/constants/cycle"; +import { CYCLE_FAVORITED, CYCLE_UNFAVORITED } from "@/constants/event-tracker"; +import { EUserProjectRoles } from "@/constants/project"; +// helpers +import { findHowManyDaysLeft, getDate, renderFormattedDate } from "@/helpers/date-time.helper"; +// hooks +import { useEventTracker, useCycle, useUser, useMember } from "@/hooks/store"; +import { usePlatformOS } from "@/hooks/use-platform-os"; type TCyclesListItem = { cycleId: string; @@ -29,104 +28,96 @@ type TCyclesListItem = { handleRemoveFromFavorites?: () => void; workspaceSlug: string; projectId: string; + isArchived?: boolean; }; export const CyclesListItem: FC = observer((props) => { - const { cycleId, workspaceSlug, projectId } = props; - // states - const [updateModal, setUpdateModal] = useState(false); - const [deleteModal, setDeleteModal] = useState(false); + const { cycleId, workspaceSlug, projectId, isArchived } = props; // router const router = useRouter(); + // hooks + const { isMobile } = 
usePlatformOS(); // store hooks - const { setTrackElement, captureEvent } = useEventTracker(); + const { captureEvent } = useEventTracker(); const { membership: { currentProjectRole }, } = useUser(); const { getCycleById, addCycleToFavorites, removeCycleFromFavorites } = useCycle(); const { getUserDetails } = useMember(); - // toast alert - const { setToastAlert } = useToast(); - - const handleCopyText = (e: MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); - const originURL = typeof window !== "undefined" && window.location.origin ? window.location.origin : ""; - - copyTextToClipboard(`${originURL}/${workspaceSlug}/projects/${projectId}/cycles/${cycleId}`).then(() => { - setToastAlert({ - type: "success", - title: "Link Copied!", - message: "Cycle link copied to clipboard.", - }); - }); - }; const handleAddToFavorites = (e: MouseEvent) => { e.preventDefault(); if (!workspaceSlug || !projectId) return; - addCycleToFavorites(workspaceSlug?.toString(), projectId.toString(), cycleId) - .then(() => { + const addToFavoritePromise = addCycleToFavorites(workspaceSlug?.toString(), projectId.toString(), cycleId).then( + () => { captureEvent(CYCLE_FAVORITED, { cycle_id: cycleId, element: "List layout", state: "SUCCESS", }); - }) - .catch(() => { - setToastAlert({ - type: "error", - title: "Error!", - message: "Couldn't add the cycle to favorites. Please try again.", - }); - }); + } + ); + + setPromiseToast(addToFavoritePromise, { + loading: "Adding cycle to favorites...", + success: { + title: "Success!", + message: () => "Cycle added to favorites.", + }, + error: { + title: "Error!", + message: () => "Couldn't add the cycle to favorites. 
Please try again.", + }, + }); }; const handleRemoveFromFavorites = (e: MouseEvent) => { e.preventDefault(); if (!workspaceSlug || !projectId) return; - removeCycleFromFavorites(workspaceSlug?.toString(), projectId.toString(), cycleId) - .then(() => { - captureEvent(CYCLE_UNFAVORITED, { - cycle_id: cycleId, - element: "List layout", - state: "SUCCESS", - }); - }) - .catch(() => { - setToastAlert({ - type: "error", - title: "Error!", - message: "Couldn't add the cycle to favorites. Please try again.", - }); + const removeFromFavoritePromise = removeCycleFromFavorites( + workspaceSlug?.toString(), + projectId.toString(), + cycleId + ).then(() => { + captureEvent(CYCLE_UNFAVORITED, { + cycle_id: cycleId, + element: "List layout", + state: "SUCCESS", }); - }; - - const handleEditCycle = (e: MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); - setTrackElement("Cycles page list layout"); - setUpdateModal(true); - }; + }); - const handleDeleteCycle = (e: MouseEvent) => { - e.preventDefault(); - e.stopPropagation(); - setTrackElement("Cycles page list layout"); - setDeleteModal(true); + setPromiseToast(removeFromFavoritePromise, { + loading: "Removing cycle from favorites...", + success: { + title: "Success!", + message: () => "Cycle removed from favorites.", + }, + error: { + title: "Error!", + message: () => "Couldn't remove the cycle from favorites. 
Please try again.", + }, + }); }; - const openCycleOverview = (e: MouseEvent) => { + const openCycleOverview = (e: MouseEvent) => { const { query } = router; e.preventDefault(); e.stopPropagation(); - router.push({ - pathname: router.pathname, - query: { ...query, peekCycle: cycleId }, - }); + if (query.peekCycle) { + delete query.peekCycle; + router.push({ + pathname: router.pathname, + query: { ...query }, + }); + } else { + router.push({ + pathname: router.pathname, + query: { ...query, peekCycle: cycleId }, + }); + } }; const cycleDetails = getCycleById(cycleId); @@ -137,10 +128,10 @@ export const CyclesListItem: FC = observer((props) => { // TODO: change this logic once backend fix the response const cycleStatus = cycleDetails.status ? (cycleDetails.status.toLocaleLowerCase() as TCycleGroups) : "draft"; const isCompleted = cycleStatus === "completed"; - const endDate = new Date(cycleDetails.end_date ?? ""); - const startDate = new Date(cycleDetails.start_date ?? ""); + const endDate = getDate(cycleDetails.end_date); + const startDate = getDate(cycleDetails.start_date); - const isEditingAllowed = !!currentProjectRole && currentProjectRole >= EUserWorkspaceRoles.MEMBER; + const isEditingAllowed = !!currentProjectRole && currentProjectRole >= EUserProjectRoles.MEMBER; const cycleTotalIssues = cycleDetails.backlog_issues + @@ -163,21 +154,14 @@ export const CyclesListItem: FC = observer((props) => { return ( <> - setUpdateModal(false)} - workspaceSlug={workspaceSlug} - projectId={projectId} - /> - setDeleteModal(false)} - workspaceSlug={workspaceSlug} - projectId={projectId} - /> - + { + if (isArchived) { + openCycleOverview(e); + } + }} + >
@@ -199,18 +183,22 @@ export const CyclesListItem: FC = observer((props) => {
- + {cycleDetails.name}
-
- +
+ {renderDate && `${renderFormattedDate(startDate) ?? `_ _`} - ${renderFormattedDate(endDate) ?? `_ _`}`} +
+
+
{currentCycle && (
= observer((props) => { : `${currentCycle.label}`}
)} -
-
-
- {renderDate && `${renderFormattedDate(startDate) ?? `_ _`} - ${renderFormattedDate(endDate) ?? `_ _`}`} -
- +
{cycleDetails.assignee_ids?.length > 0 ? ( - {cycleDetails.assignee_ids?.map((assigne_id) => { - const member = getUserDetails(assigne_id); + {cycleDetails.assignee_ids?.map((assignee_id) => { + const member = getUserDetails(assignee_id); return ; })} @@ -248,44 +231,23 @@ export const CyclesListItem: FC = observer((props) => {
- {isEditingAllowed && ( - <> - {cycleDetails.is_favorite ? ( - - ) : ( - - )} - - - {!isCompleted && isEditingAllowed && ( - <> - - - - Edit cycle - - - - - - Delete cycle - - - - )} - - - - Copy cycle link - - - - - )} + {isEditingAllowed && + !isArchived && + (cycleDetails.is_favorite ? ( + + ) : ( + + ))} +
diff --git a/web/components/cycles/list/cycles-list-map.tsx b/web/components/cycles/list/cycles-list-map.tsx new file mode 100644 index 00000000000..7a99f5ab736 --- /dev/null +++ b/web/components/cycles/list/cycles-list-map.tsx @@ -0,0 +1,27 @@ +// components +import { CyclesListItem } from "@/components/cycles"; + +type Props = { + cycleIds: string[]; + projectId: string; + workspaceSlug: string; + isArchived?: boolean; +}; + +export const CyclesListMap: React.FC = (props) => { + const { cycleIds, projectId, workspaceSlug, isArchived } = props; + + return ( + <> + {cycleIds.map((cycleId) => ( + + ))} + + ); +}; diff --git a/web/components/cycles/list/index.ts b/web/components/cycles/list/index.ts new file mode 100644 index 00000000000..46a3557d7fe --- /dev/null +++ b/web/components/cycles/list/index.ts @@ -0,0 +1,3 @@ +export * from "./cycles-list-item"; +export * from "./cycles-list-map"; +export * from "./root"; diff --git a/web/components/cycles/list/root.tsx b/web/components/cycles/list/root.tsx new file mode 100644 index 00000000000..0239f1d788f --- /dev/null +++ b/web/components/cycles/list/root.tsx @@ -0,0 +1,60 @@ +import { FC } from "react"; +import { observer } from "mobx-react-lite"; +import { ChevronRight } from "lucide-react"; +import { Disclosure } from "@headlessui/react"; +// components +import { CyclePeekOverview, CyclesListMap } from "@/components/cycles"; +// helpers +import { cn } from "@/helpers/common.helper"; + +export interface ICyclesList { + completedCycleIds: string[]; + cycleIds: string[]; + workspaceSlug: string; + projectId: string; + isArchived?: boolean; +} + +export const CyclesList: FC = observer((props) => { + const { completedCycleIds, cycleIds, workspaceSlug, projectId, isArchived = false } = props; + + return ( +
+
+
+ + {completedCycleIds.length !== 0 && ( + + + {({ open }) => ( + <> + Completed cycles ({completedCycleIds.length}) + + + )} + + + + + + )} +
+ +
+
+ ); +}); diff --git a/web/components/cycles/modal.tsx b/web/components/cycles/modal.tsx index b22afb2b44d..6ed53eb326d 100644 --- a/web/components/cycles/modal.tsx +++ b/web/components/cycles/modal.tsx @@ -1,17 +1,18 @@ import React, { useEffect, useState } from "react"; import { Dialog, Transition } from "@headlessui/react"; +import type { CycleDateCheckData, ICycle, TCycleTabOptions } from "@plane/types"; // services -import { CycleService } from "services/cycle.service"; +import { TOAST_TYPE, setToast } from "@plane/ui"; +import { CycleForm } from "@/components/cycles"; +import { CYCLE_CREATED, CYCLE_UPDATED } from "@/constants/event-tracker"; +import { useEventTracker, useCycle, useProject } from "@/hooks/store"; +import useLocalStorage from "@/hooks/use-local-storage"; +import { CycleService } from "@/services/cycle.service"; // hooks -import { useEventTracker, useCycle, useProject } from "hooks/store"; -import useToast from "hooks/use-toast"; -import useLocalStorage from "hooks/use-local-storage"; // components -import { CycleForm } from "components/cycles"; +// ui // types -import type { CycleDateCheckData, ICycle, TCycleView } from "@plane/types"; // constants -import { CYCLE_CREATED, CYCLE_UPDATED } from "constants/event-tracker"; type CycleModalProps = { isOpen: boolean; @@ -32,10 +33,8 @@ export const CycleCreateUpdateModal: React.FC = (props) => { const { captureCycleEvent } = useEventTracker(); const { workspaceProjectIds } = useProject(); const { createCycle, updateCycleDetails } = useCycle(); - // toast alert - const { setToastAlert } = useToast(); - const { setValue: setCycleTab } = useLocalStorage("cycle_tab", "active"); + const { setValue: setCycleTab } = useLocalStorage("cycle_tab", "active"); const handleCreateCycle = async (payload: Partial) => { if (!workspaceSlug || !projectId) return; @@ -43,8 +42,8 @@ export const CycleCreateUpdateModal: React.FC = (props) => { const selectedProjectId = payload.project_id ?? 
projectId.toString(); await createCycle(workspaceSlug, selectedProjectId, payload) .then((res) => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Cycle created successfully.", }); @@ -54,10 +53,10 @@ export const CycleCreateUpdateModal: React.FC = (props) => { }); }) .catch((err) => { - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", - message: err.detail ?? "Error in creating cycle. Please try again.", + message: err?.detail ?? "Error in creating cycle. Please try again.", }); captureCycleEvent({ eventName: CYCLE_CREATED, @@ -77,8 +76,8 @@ export const CycleCreateUpdateModal: React.FC = (props) => { eventName: CYCLE_UPDATED, payload: { ...res, changed_properties: changed_properties, state: "SUCCESS" }, }); - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Cycle updated successfully.", }); @@ -88,18 +87,18 @@ export const CycleCreateUpdateModal: React.FC = (props) => { eventName: CYCLE_UPDATED, payload: { ...payload, state: "FAILED" }, }); - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", - message: err.detail ?? "Error in updating cycle. Please try again.", + message: err?.detail ?? "Error in updating cycle. 
Please try again.", }); }); }; - const dateChecker = async (payload: CycleDateCheckData) => { + const dateChecker = async (projectId: string, payload: CycleDateCheckData) => { let status = false; - await cycleService.cycleDateCheck(workspaceSlug as string, projectId as string, payload).then((res) => { + await cycleService.cycleDateCheck(workspaceSlug, projectId, payload).then((res) => { status = res.status; }); @@ -117,13 +116,13 @@ export const CycleCreateUpdateModal: React.FC = (props) => { if (payload.start_date && payload.end_date) { if (data?.start_date && data?.end_date) - isDateValid = await dateChecker({ + isDateValid = await dateChecker(payload.project_id ?? projectId, { start_date: payload.start_date, end_date: payload.end_date, cycle_id: data.id, }); else - isDateValid = await dateChecker({ + isDateValid = await dateChecker(payload.project_id ?? projectId, { start_date: payload.start_date, end_date: payload.end_date, }); @@ -138,8 +137,8 @@ export const CycleCreateUpdateModal: React.FC = (props) => { } handleClose(); } else - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: "You already have a cycle on the given dates, if you want to create a draft cycle, remove the dates.", }); diff --git a/web/components/cycles/quick-actions.tsx b/web/components/cycles/quick-actions.tsx new file mode 100644 index 00000000000..215f07beff2 --- /dev/null +++ b/web/components/cycles/quick-actions.tsx @@ -0,0 +1,184 @@ +import { useState } from "react"; +import { observer } from "mobx-react"; +import { useRouter } from "next/router"; +// icons +import { ArchiveRestoreIcon, LinkIcon, Pencil, Trash2 } from "lucide-react"; +// ui +import { ArchiveIcon, CustomMenu, TOAST_TYPE, setToast } from "@plane/ui"; +// components +import { ArchiveCycleModal, CycleCreateUpdateModal, CycleDeleteModal } from "@/components/cycles"; +// constants +import { EUserProjectRoles } from "@/constants/project"; +// helpers +import { copyUrlToClipboard 
} from "@/helpers/string.helper"; +// hooks +import { useCycle, useEventTracker, useUser } from "@/hooks/store"; + +type Props = { + cycleId: string; + projectId: string; + workspaceSlug: string; + isArchived?: boolean; +}; + +export const CycleQuickActions: React.FC = observer((props) => { + const { cycleId, projectId, workspaceSlug, isArchived } = props; + // router + const router = useRouter(); + // states + const [updateModal, setUpdateModal] = useState(false); + const [archiveCycleModal, setArchiveCycleModal] = useState(false); + const [deleteModal, setDeleteModal] = useState(false); + // store hooks + const { setTrackElement } = useEventTracker(); + const { + membership: { currentWorkspaceAllProjectsRole }, + } = useUser(); + const { getCycleById, restoreCycle } = useCycle(); + // derived values + const cycleDetails = getCycleById(cycleId); + const isCompleted = cycleDetails?.status.toLowerCase() === "completed"; + // auth + const isEditingAllowed = + !!currentWorkspaceAllProjectsRole && currentWorkspaceAllProjectsRole[projectId] >= EUserProjectRoles.MEMBER; + + const handleCopyText = (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + + copyUrlToClipboard(`${workspaceSlug}/projects/${projectId}/cycles/${cycleId}`).then(() => { + setToast({ + type: TOAST_TYPE.SUCCESS, + title: "Link Copied!", + message: "Cycle link copied to clipboard.", + }); + }); + }; + + const handleEditCycle = (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + setTrackElement("Cycles page list layout"); + setUpdateModal(true); + }; + + const handleArchiveCycle = (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + setArchiveCycleModal(true); + }; + + const handleRestoreCycle = async (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + await restoreCycle(workspaceSlug, projectId, cycleId) + .then(() => { + setToast({ + type: TOAST_TYPE.SUCCESS, + title: "Restore success", + message: "Your cycle can be 
found in project cycles.", + }); + router.push(`/${workspaceSlug}/projects/${projectId}/cycles/${cycleId}`); + }) + .catch(() => + setToast({ + type: TOAST_TYPE.ERROR, + title: "Error!", + message: "Cycle could not be restored. Please try again.", + }) + ); + }; + + const handleDeleteCycle = (e: React.MouseEvent) => { + e.preventDefault(); + e.stopPropagation(); + setTrackElement("Cycles page list layout"); + setDeleteModal(true); + }; + + return ( + <> + {cycleDetails && ( +
+ setUpdateModal(false)} + workspaceSlug={workspaceSlug} + projectId={projectId} + /> + setArchiveCycleModal(false)} + /> + setDeleteModal(false)} + workspaceSlug={workspaceSlug} + projectId={projectId} + /> +
+ )} + + {!isCompleted && isEditingAllowed && !isArchived && ( + + + + Edit cycle + + + )} + {isEditingAllowed && !isArchived && ( + + {isCompleted ? ( +
+ + Archive cycle +
+ ) : ( +
+ +
+

Archive cycle

+

+ Only completed cycle
can be archived. +

+
+
+ )} +
+ )} + {isEditingAllowed && isArchived && ( + + + + Restore cycle + + + )} + {!isArchived && ( + + + + Copy cycle link + + + )} + {!isCompleted && isEditingAllowed && ( +
+ + + + Delete cycle + + +
+ )} +
+ + ); +}); diff --git a/web/components/cycles/sidebar.tsx b/web/components/cycles/sidebar.tsx index 646736bd2b6..e333564ee7f 100644 --- a/web/components/cycles/sidebar.tsx +++ b/web/components/cycles/sidebar.tsx @@ -1,37 +1,45 @@ import React, { useEffect, useState } from "react"; -import { useRouter } from "next/router"; +import isEmpty from "lodash/isEmpty"; import { observer } from "mobx-react-lite"; +import { useRouter } from "next/router"; import { Controller, useForm } from "react-hook-form"; -import { Disclosure, Transition } from "@headlessui/react"; -import isEmpty from "lodash/isEmpty"; -// services -import { CycleService } from "services/cycle.service"; -// hooks -import { useEventTracker, useCycle, useUser, useMember } from "hooks/store"; -import useToast from "hooks/use-toast"; -// components -import { SidebarProgressStats } from "components/core"; -import ProgressChart from "components/core/sidebar/progress-chart"; -import { CycleDeleteModal } from "components/cycles/delete-modal"; -// ui -import { Avatar, CustomMenu, Loader, LayersIcon } from "@plane/ui"; // icons -import { ChevronDown, LinkIcon, Trash2, UserCircle2, AlertCircle, ChevronRight, CalendarClock } from "lucide-react"; -// helpers -import { copyUrlToClipboard } from "helpers/string.helper"; -import { findHowManyDaysLeft, renderFormattedPayloadDate } from "helpers/date-time.helper"; +import { + ArchiveRestoreIcon, + ChevronDown, + LinkIcon, + Trash2, + UserCircle2, + AlertCircle, + ChevronRight, + CalendarClock, +} from "lucide-react"; +import { Disclosure, Transition } from "@headlessui/react"; // types import { ICycle } from "@plane/types"; +// ui +import { Avatar, ArchiveIcon, CustomMenu, Loader, LayersIcon, TOAST_TYPE, setToast, TextArea } from "@plane/ui"; +// components +import { SidebarProgressStats } from "@/components/core"; +import ProgressChart from "@/components/core/sidebar/progress-chart"; +import { ArchiveCycleModal, CycleDeleteModal } from "@/components/cycles"; +import { 
DateRangeDropdown } from "@/components/dropdowns"; // constants -import { EUserWorkspaceRoles } from "constants/workspace"; -import { CYCLE_UPDATED } from "constants/event-tracker"; -// fetch-keys -import { CYCLE_STATUS } from "constants/cycle"; -import { DateRangeDropdown } from "components/dropdowns"; +import { CYCLE_STATUS } from "@/constants/cycle"; +import { CYCLE_UPDATED } from "@/constants/event-tracker"; +import { EUserWorkspaceRoles } from "@/constants/workspace"; +// helpers +import { findHowManyDaysLeft, getDate, renderFormattedPayloadDate } from "@/helpers/date-time.helper"; +import { copyUrlToClipboard } from "@/helpers/string.helper"; +// hooks +import { useEventTracker, useCycle, useUser, useMember } from "@/hooks/store"; +// services +import { CycleService } from "@/services/cycle.service"; type Props = { cycleId: string; handleClose: () => void; + isArchived?: boolean; }; const defaultValues: Partial = { @@ -44,8 +52,9 @@ const cycleService = new CycleService(); // TODO: refactor the whole component export const CycleDetailsSidebar: React.FC = observer((props) => { - const { cycleId, handleClose } = props; + const { cycleId, handleClose, isArchived } = props; // states + const [archiveCycleModal, setArchiveCycleModal] = useState(false); const [cycleDeleteModal, setCycleDeleteModal] = useState(false); // router const router = useRouter(); @@ -55,13 +64,11 @@ export const CycleDetailsSidebar: React.FC = observer((props) => { const { membership: { currentProjectRole }, } = useUser(); - const { getCycleById, updateCycleDetails } = useCycle(); + const { getCycleById, updateCycleDetails, restoreCycle } = useCycle(); const { getUserDetails } = useMember(); // derived values const cycleDetails = getCycleById(cycleId); const cycleOwnerDetails = cycleDetails ? 
getUserDetails(cycleDetails.owned_by_id) : undefined; - // toast alert - const { setToastAlert } = useToast(); // form info const { control, reset } = useForm({ defaultValues, @@ -98,20 +105,41 @@ export const CycleDetailsSidebar: React.FC = observer((props) => { const handleCopyText = () => { copyUrlToClipboard(`${workspaceSlug}/projects/${projectId}/cycles/${cycleId}`) .then(() => { - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Link Copied!", message: "Cycle link copied to clipboard.", }); }) .catch(() => { - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Some error occurred", }); }); }; + const handleRestoreCycle = async () => { + if (!workspaceSlug || !projectId) return; + + await restoreCycle(workspaceSlug.toString(), projectId.toString(), cycleId) + .then(() => { + setToast({ + type: TOAST_TYPE.SUCCESS, + title: "Restore success", + message: "Your cycle can be found in project cycles.", + }); + router.push(`/${workspaceSlug.toString()}/projects/${projectId.toString()}/cycles/${cycleId}`); + }) + .catch(() => + setToast({ + type: TOAST_TYPE.ERROR, + title: "Error!", + message: "Cycle could not be restored. 
Please try again.", + }) + ); + }; + useEffect(() => { if (cycleDetails) reset({ @@ -147,14 +175,14 @@ export const CycleDetailsSidebar: React.FC = observer((props) => { if (isDateValid) { submitChanges(payload, "date_range"); - setToastAlert({ - type: "success", + setToast({ + type: TOAST_TYPE.SUCCESS, title: "Success!", message: "Cycle updated successfully.", }); } else { - setToastAlert({ - type: "error", + setToast({ + type: TOAST_TYPE.ERROR, title: "Error!", message: "You already have a cycle on the given dates, if you want to create a draft cycle, you can do that by removing both the dates.", @@ -186,8 +214,11 @@ export const CycleDetailsSidebar: React.FC = observer((props) => { const cycleStatus = cycleDetails?.status.toLocaleLowerCase(); const isCompleted = cycleStatus === "completed"; - const isStartValid = new Date(`${cycleDetails?.start_date}`) <= new Date(); - const isEndValid = new Date(`${cycleDetails?.end_date}`) >= new Date(`${cycleDetails?.start_date}`); + const startDate = getDate(cycleDetails?.start_date); + const endDate = getDate(cycleDetails?.end_date); + + const isStartValid = startDate && startDate <= new Date(); + const isEndValid = endDate && startDate && endDate >= startDate; const progressPercentage = cycleDetails ? isCompleted && cycleDetails?.progress_snapshot @@ -228,19 +259,28 @@ export const CycleDetailsSidebar: React.FC = observer((props) => { const isEditingAllowed = !!currentProjectRole && currentProjectRole >= EUserWorkspaceRoles.MEMBER; return ( - <> +
{cycleDetails && workspaceSlug && projectId && ( - setCycleDeleteModal(false)} - workspaceSlug={workspaceSlug.toString()} - projectId={projectId.toString()} - /> + <> + setArchiveCycleModal(false)} + /> + setCycleDeleteModal(false)} + workspaceSlug={workspaceSlug.toString()} + projectId={projectId.toString()} + /> + )} <> -
+
- - {!isCompleted && isEditingAllowed && ( + {!isArchived && ( + + )} + {isEditingAllowed && ( - { - setTrackElement("CYCLE_PAGE_SIDEBAR"); - setCycleDeleteModal(true); - }} - > - - - Delete cycle - - + {!isArchived && ( + setArchiveCycleModal(true)} disabled={!isCompleted}> + {isCompleted ? ( +
+ + Archive cycle +
+ ) : ( +
+ +
+

Archive cycle

+

+ Only completed cycle
can be archived. +

+
+
+ )} +
+ )} + {isArchived && ( + + + + Restore cycle + + + )} + {!isCompleted && ( + { + setTrackElement("CYCLE_PAGE_SIDEBAR"); + setCycleDeleteModal(true); + }} + > + + + Delete cycle + + + )}
)}
@@ -291,9 +363,11 @@ export const CycleDetailsSidebar: React.FC = observer((props) => {
{cycleDetails.description && ( - - {cycleDetails.description} - +