diff --git a/.env.example b/.env.example index 57a20c7a6..7c6ce037f 100644 --- a/.env.example +++ b/.env.example @@ -1,29 +1,52 @@ -# Required -AUTH_SECRET="" # openssl rand -base64 32 -DATABASE_URL="" # Format: "postgresql://postgres:pass@127.0.0.1:5432/comp" -RESEND_DOMAIN="" # Domain configured in Resend, e.g. mail.trycomp.ai -RESEND_API_KEY="" # API key from Resend for email authentication / invites -RESEND_FROM_MARKETING="Lewis Carhart " -RESEND_FROM_SYSTEM="Comp AI " -RESEND_FROM_DEFAULT="Comp AI " -RESEND_TO_TEST="mail@mail.trycomp.ai" -RESEND_REPLY_TO_MARKETING="lewis@mail.trycomp.ai" -REVALIDATION_SECRET="" # openssl rand -base64 32 -NEXT_PUBLIC_PORTAL_URL="http://localhost:3002" # The employee portal uses port 3002 by default +# Commented-out variables are optional. NOTE: This file is for local Docker Compose use only; additional variables will need to be set to deploy on development+ -# Recommended -# Store attachemnts in any S3 compatible bucket, we use AWS APP_AWS_ACCESS_KEY_ID="" # AWS Access Key ID APP_AWS_SECRET_ACCESS_KEY="" # AWS Secret Access Key APP_AWS_REGION="" # AWS Region APP_AWS_BUCKET_NAME="" # AWS Bucket Name - -TRIGGER_SECRET_KEY="" # For background jobs. Self-host or use cloud-version @ https://trigger.dev -# TRIGGER_API_URL="" # Only set if you are self-hosting -TRIGGER_API_KEY="" # API key from Trigger.dev -TRIGGER_SECRET_KEY="" # Secret key from Trigger.dev - -OPENAI_API_KEY="" # AI Chat + Auto Generated Policies, Risks + Vendors -FIRECRAWL_API_KEY="" # For research, self-host or use cloud-version @ https://firecrawl.dev - -AUTH_TRUSTED_ORIGINS=http://localhost:3000,https://*.trycomp.ai,http://localhost:3002 +APP_AWS_ACCESS_KEY_ID= +APP_AWS_BUCKET_NAME= +APP_AWS_REGION= +APP_AWS_SECRET_ACCESS_KEY= +APP_ENVIRONMENT="local" # Options are "local", "development", "staging", "production" +# AUTH_GITHUB_ID= +# AUTH_GITHUB_SECRET= +# AUTH_GOOGLE_ID= +# AUTH_GOOGLE_SECRET= +# AUTH_SECRET= +BETTER_AUTH_SECRET= +DATABASE_URL= +# DUB_API_KEY= +# DUB_REFER_URL= +FIRECRAWL_API_KEY= +# FLEET_URL= +# FLEET_TOKEN= +FORCE_DATABASE_WIPE_AND_RESEED="false" # Set to "true" to wipe and reseed Database +# GA4_API_SECRET= +# GA4_MEASUREMENT_ID= +# LINKEDIN_CONVERSIONS_ACCESS_TOKEN= +# NEXT_PUBLIC_API_URL= +# NEXT_PUBLIC_BETTER_AUTH_URL= +# NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL= +# NEXT_PUBLIC_GTM_ID= +# NEXT_PUBLIC_IS_DUB_ENABLED= +# NEXT_PUBLIC_LINKEDIN_CONVERSION_ID= +# NEXT_PUBLIC_LINKEDIN_PARTNER_ID= +# NEXT_PUBLIC_PORTAL_URL= +# NEXT_PUBLIC_POSTHOG_HOST= +# NEXT_PUBLIC_POSTHOG_KEY= +OPENAI_API_KEY= +REVALIDATION_SECRET= +RESEND_API_KEY= +RESEND_DOMAIN= +RESEND_FROM_DEFAULT= +RESEND_FROM_MARKETING= +RESEND_FROM_SYSTEM= +# TRIGGER_API_KEY= +# TRIGGER_API_URL= +TRIGGER_ACCESS_TOKEN= +TRIGGER_PROJECT_ID= +TRIGGER_SECRET_KEY= +# TRIGGER_QUEUE_CONCURRENCY=10 +# TRUST_PORTAL_PROJECT_ID= +UPSTASH_REDIS_REST_TOKEN= +UPSTASH_REDIS_REST_URL= +# VERCEL_ACCESS_TOKEN= +# VERCEL_PROJECT_ID= +# VERCEL_TEAM_ID= diff --git a/.husky/commit-msg b/.husky/commit-msg index 766bd7721..a78cc751d 100755 --- a/.husky/commit-msg +++ b/.husky/commit-msg @@ -1,4 +1 @@ -#!/usr/bin/env sh -.
"$(dirname "$0")/_/husky.sh" - npx commitlint --edit $1 diff --git a/Dockerfile b/Dockerfile index b22294546..4c6490760 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,4 @@ +# syntax=docker/dockerfile:1.6 # ============================================================================= # STAGE 1: Dependencies - Install and cache workspace dependencies # ============================================================================= @@ -45,8 +46,22 @@ RUN bun install RUN cp -R packages/db/prisma/migrations node_modules/@trycompai/db/dist/ # Run migrations against the combined schema published by @trycompai/db -RUN echo "Running migrations against @trycompai/db combined schema" -CMD ["bunx", "prisma", "migrate", "deploy", "--schema=node_modules/@trycompai/db/dist/schema.prisma"] +RUN cat <<'EOF' > /migrate.sh +#!/bin/sh +set -eu + +echo "[Migrator] Starting prisma migrate deploy" + +if [ "${FORCE_DATABASE_WIPE_AND_RESEED:-false}" = "true" ]; then + echo "[Migrator] FORCE_DATABASE_WIPE_AND_RESEED=true detected. Resetting database before running migrations." + bunx prisma migrate reset --force --skip-seed --schema=node_modules/@trycompai/db/dist/schema.prisma +fi + +bunx prisma migrate deploy --schema=node_modules/@trycompai/db/dist/schema.prisma +echo "[Migrator] Prisma migrate deploy finished" +EOF +RUN chmod +x /migrate.sh +CMD ["/migrate.sh"] # ============================================================================= # STAGE 3: App Builder @@ -55,6 +70,11 @@ FROM deps AS app-builder WORKDIR /app +# Install system packages needed for Trigger CLI during build +RUN apt-get update \ + && apt-get install -y --no-install-recommends ca-certificates \ + && rm -rf /var/lib/apt/lists/* + # Copy all source code needed for build COPY packages ./packages COPY apps/app ./apps/app @@ -67,8 +87,10 @@ RUN cd packages/db && node scripts/combine-schemas.js RUN cp packages/db/dist/schema.prisma apps/app/prisma/schema.prisma # Ensure Next build has required public env at build-time +ARG TRIGGER_PROJECT_ID ARG NEXT_PUBLIC_BETTER_AUTH_URL ARG NEXT_PUBLIC_PORTAL_URL +ARG APP_ENVIRONMENT ARG NEXT_PUBLIC_POSTHOG_KEY ARG NEXT_PUBLIC_POSTHOG_HOST ARG NEXT_PUBLIC_IS_DUB_ENABLED @@ -77,7 +99,8 @@ ARG NEXT_PUBLIC_LINKEDIN_PARTNER_ID ARG NEXT_PUBLIC_LINKEDIN_CONVERSION_ID ARG NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL ARG NEXT_PUBLIC_API_URL -ENV NEXT_PUBLIC_BETTER_AUTH_URL=$NEXT_PUBLIC_BETTER_AUTH_URL \ +ENV TRIGGER_PROJECT_ID=$TRIGGER_PROJECT_ID \ + NEXT_PUBLIC_BETTER_AUTH_URL=$NEXT_PUBLIC_BETTER_AUTH_URL \ NEXT_PUBLIC_PORTAL_URL=$NEXT_PUBLIC_PORTAL_URL \ NEXT_PUBLIC_POSTHOG_KEY=$NEXT_PUBLIC_POSTHOG_KEY \ NEXT_PUBLIC_POSTHOG_HOST=$NEXT_PUBLIC_POSTHOG_HOST \ @@ -87,6 +110,7 @@ ENV NEXT_PUBLIC_BETTER_AUTH_URL=$NEXT_PUBLIC_BETTER_AUTH_URL \ NEXT_PUBLIC_LINKEDIN_CONVERSION_ID=$NEXT_PUBLIC_LINKEDIN_CONVERSION_ID \ NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL=$NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL \ NEXT_PUBLIC_API_URL=$NEXT_PUBLIC_API_URL \ + APP_ENVIRONMENT=$APP_ENVIRONMENT \ NEXT_TELEMETRY_DISABLED=1 NODE_ENV=production \ NEXT_OUTPUT_STANDALONE=true \ NODE_OPTIONS=--max_old_space_size=6144 @@ -94,11 +118,46 @@ ENV NEXT_PUBLIC_BETTER_AUTH_URL=$NEXT_PUBLIC_BETTER_AUTH_URL \ # Build the app (schema already combined above) RUN cd apps/app && SKIP_ENV_VALIDATION=true bun run build:docker +# Run Trigger.dev deploy during build (pinned version) +RUN --mount=type=secret,id=trigger_env_file \ + sh -c 'set -eu; \ + set -a; \ + . 
/run/secrets/trigger_env_file; \ + set +a; \ + cd apps/app; \ + CI=1 bun x trigger.dev@4.0.0 deploy --env-file /run/secrets/trigger_env_file' + # ============================================================================= # STAGE 4: App Production # ============================================================================= FROM node:22-alpine AS app +ARG TRIGGER_PROJECT_ID +ARG NEXT_PUBLIC_BETTER_AUTH_URL +ARG NEXT_PUBLIC_PORTAL_URL +ARG APP_ENVIRONMENT +ARG NEXT_PUBLIC_POSTHOG_KEY +ARG NEXT_PUBLIC_POSTHOG_HOST +ARG NEXT_PUBLIC_IS_DUB_ENABLED +ARG NEXT_PUBLIC_GTM_ID +ARG NEXT_PUBLIC_LINKEDIN_PARTNER_ID +ARG NEXT_PUBLIC_LINKEDIN_CONVERSION_ID +ARG NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL +ARG NEXT_PUBLIC_API_URL + +ENV TRIGGER_PROJECT_ID=${TRIGGER_PROJECT_ID} \ + NEXT_PUBLIC_BETTER_AUTH_URL=${NEXT_PUBLIC_BETTER_AUTH_URL} \ + NEXT_PUBLIC_PORTAL_URL=${NEXT_PUBLIC_PORTAL_URL} \ + NEXT_PUBLIC_POSTHOG_KEY=${NEXT_PUBLIC_POSTHOG_KEY} \ + NEXT_PUBLIC_POSTHOG_HOST=${NEXT_PUBLIC_POSTHOG_HOST} \ + NEXT_PUBLIC_IS_DUB_ENABLED=${NEXT_PUBLIC_IS_DUB_ENABLED} \ + NEXT_PUBLIC_GTM_ID=${NEXT_PUBLIC_GTM_ID} \ + NEXT_PUBLIC_LINKEDIN_PARTNER_ID=${NEXT_PUBLIC_LINKEDIN_PARTNER_ID} \ + NEXT_PUBLIC_LINKEDIN_CONVERSION_ID=${NEXT_PUBLIC_LINKEDIN_CONVERSION_ID} \ + NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL=${NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL} \ + NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL} \ + APP_ENVIRONMENT=${APP_ENVIRONMENT} + WORKDIR /app # Copy Next standalone output @@ -106,7 +165,6 @@ COPY --from=app-builder /app/apps/app/.next/standalone ./ COPY --from=app-builder /app/apps/app/.next/static ./apps/app/.next/static COPY --from=app-builder /app/apps/app/public ./apps/app/public - EXPOSE 3000 CMD ["node", "apps/app/server.js"] @@ -153,4 +211,4 @@ COPY --from=portal-builder /app/apps/portal/public ./apps/portal/public EXPOSE 3000 CMD ["node", "apps/portal/server.js"] -# (Trigger.dev hosted; no local runner stage) \ No newline at end of file +# (Trigger.dev hosted; no local runner stage) diff --git a/README.md b/README.md index 1b859c087..a7d27ac66 100644 --- a/README.md +++ b/README.md @@ -73,107 +73,146 @@ To get a local copy up and running, please follow these simple steps. ### Prerequisites -Here is what you need to be able to run Comp AI. +Here is what you need to be able to run Comp AI without Docker. - Node.js (Version: >=20.x) - Bun (Version: >=1.1.36) - Postgres (Version: >=15.x) -## Development +- Turbo (Optional): +```sh + bun add -g turbo +``` -To get the project working locally with all integrations, follow these extended development steps. +### Local Development Setup -### Setup +1. There are 3 main services that have independent `.env` files; these need to be updated/maintained independently. -1. Clone the repo: +- Main Application: +```sh + cp apps/app/.env.example apps/app/.env +``` - ```sh +- Employee Portal: +```sh + cp apps/portal/.env.example apps/portal/.env +``` + +- Migrator/Seeder Service: +```sh + cp packages/db/.env.example packages/db/.env +``` + +2. Clone the repo: + +```sh git clone https://github.com/trycompai/comp.git - ``` +``` -2. Navigate to the project directory: +3. Navigate to the project directory: - ```sh +```sh cd comp - ``` +``` -3. Install dependencies using Bun: +4. Install dependencies using Bun: ```sh bun install ``` -4. Install `concurrently` as a dev dependency: +5.
Start the apps: + +```sh + bun run dev +``` + +Or use the Turbo repo script: ```sh - bun add -d concurrently + turbo dev ``` --- -### Environment Setup +### Docker Compose +This still requires you to bring your own Postgres DB, AWS S3 bucket, Firecrawl API key, OpenAI-compatible endpoint, Resend API key, Trigger.dev project + key, and Upstash Redis API key. -- Work to move these into the compose still needs to be done; SeaweedFS should be looked at as an AWS replacement -- -Create the following `.env` files and fill them out with your credentials: +1. Clone the repo: -- `comp/apps/app/.env` -- `comp/apps/portal/.env` -- `comp/packages/db/.env` +```sh + git clone https://github.com/trycompai/comp.git +``` -You can copy from the `.env.example` files: +2. Navigate to the project directory: -### Linux / macOS +```sh + cd comp +``` + +3. Copy the root example file and fill in the required values. ```sh -cp apps/app/.env.example apps/app/.env -cp apps/portal/.env.example apps/portal/.env -cp packages/db/.env.example packages/db/.env + cp .env.example .env ``` -### Windows (Command Prompt) +4. Set the required values (at minimum): +Variables that are uncommented in `.env.example` are required; the commented-out ones are optional and can be enabled as needed. +Setting `FORCE_DATABASE_WIPE_AND_RESEED="true"` will wipe and reseed the local database the next time the containers start, and it will continue to do so on every start until you flip it back to `FORCE_DATABASE_WIPE_AND_RESEED="false"`. +Anytime you are using Docker it is recommended that you set `APP_ENVIRONMENT="local"`; there are other options, but their implementation is experimental. -```cmd -copy apps\app\.env.example apps\app\.env -copy apps\portal\.env.example apps\portal\.env -copy packages\db\.env.example packages\db\.env +5. Bring the full stack up: + +```sh + docker compose up --build -d +``` -### Windows (PowerShell) +6. SPECIAL NOTE: If you are getting stuck on resolving metadata provenance or the build hangs (more likely on macOS), just restart the build; this is normal behavior related to your DNS settings and/or a misconfigured environment variable that cannot reach its service at build time. You can use Bake (`docker buildx bake`, which uses the Docker build cache) to bypass the provenance hang. -```powershell -Copy-Item apps\app\.env.example -Destination apps\app\.env -Copy-Item apps\portal\.env.example -Destination apps\portal\.env -Copy-Item packages\db\.env.example -Destination packages\db\.env +```sh + docker buildx bake --set *.provenance=false + docker compose up --build --progress plain -d ``` -Additionally, ensure the following required environment variables are added to `.env` in `comp/apps/app/.env`: +To see where the build is hanging, you can run the following, which performs the Docker build while keeping all logs so you can audit the process. -```env -AUTH_SECRET="" # Use `openssl rand -base64 32` to generate -DATABASE_URL="postgresql://user:password@host:port/database" -RESEND_API_KEY="" # Resend (https://resend.com/api-keys) - Resend Dashboard -> API Keys -NEXT_PUBLIC_PORTAL_URL="http://localhost:3002" -REVALIDATION_SECRET="" # Use `openssl rand -base64 32` to generate +```sh + docker compose build --progress plain ``` -> ✅ Make sure you have all of these variables in your `.env` file. -> If you're copying from `.env.example`, it might be missing the last two (`NEXT_PUBLIC_PORTAL_URL` and `REVALIDATION_SECRET`), so be sure to add them manually. +The compose stack automatically runs database migrations and seeds if your database does not have them yet.
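If you want to confirm that those one-shot jobs actually ran, the sketch below is one way to check, assuming the default service names `migrator` and `seeder` from this repo's `docker-compose.yml` (both containers run once and should exit with status 0):

```sh
# Both one-shot services should show "Exited (0)" once they finish
docker compose ps -a migrator seeder

# Tail the [Migrator]/[Seeder]-prefixed log lines if startup looks stuck
docker compose logs migrator seeder
```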
Static images are served without Next.js image optimisation when `APP_ENVIRONMENT=local`, which resolves the server/client mismatch when requesting/serving images. To force optimisation while still self-hosting, set `SELF_HOSTING=false` in `.env`; setting it to `true` (or leaving it empty while `APP_ENVIRONMENT=local`) keeps optimisation disabled so images/artwork load correctly. -- Untested but theoretically correct -- --- ## Cloud & Auth Configuration ### 1. Trigger.dev - Required - Create an account on [https://cloud.trigger.dev](https://cloud.trigger.dev) - Create a project and copy the Project ID -- In `comp/apps/app/trigger.config.ts`, set: - ```ts - project: 'proj_****az***ywb**ob*'; - ``` +- Generate a Personal Access Token from **Account → Tokens** (or [https://cloud.trigger.dev/account/tokens](https://cloud.trigger.dev/account/tokens)) and copy the value; this becomes your `TRIGGER_ACCESS_TOKEN`. +- In `comp/apps/app/.env`, set: +```sh + TRIGGER_PROJECT_ID="proj_****az***ywb**ob*" + TRIGGER_ACCESS_TOKEN="tr_pat_***************" +``` +- Optionally set `TRIGGER_QUEUE_CONCURRENCY` (defaults to 10) in `.env` to control how many Trigger.dev jobs run in parallel, i.e. how many connect to your database at once. +- Expose your local app and portal to Trigger.dev with a public tunnel (e.g. ngrok). Trigger.dev will need to have a copy of your env secrets so it can run. There you can set NEXT_PUBLIC_BETTER_AUTH_URL and NEXT_PUBLIC_PORTAL_URL to the public tunnel values. This way Trigger.dev can validate its actions are working. +```sh + brew install ngrok + ngrok config add-authtoken <your-token> + ngrok http 3000 + ngrok http 3002 +``` + +### 2. PostgreSQL Requirements + +- Enable the `pgcrypto` extension on your database (run `CREATE EXTENSION IF NOT EXISTS "pgcrypto";` once per cluster; see the sketch after this list). +- Keep connection pooling in place for Trigger.dev (PgBouncer, Prisma Accelerate/Data Proxy, or managed pooling from your cloud provider) so application and Trigger.dev workloads stay within Postgres `max_connections`. Do not use PgBouncer on the app side; it will cause migrations to fail.
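A minimal sketch of that one-time `pgcrypto` setup, assuming `DATABASE_URL` in your `.env` points at the target database (`psql` is just one option here; any SQL client works):

```sh
# Enable pgcrypto once per cluster (IF NOT EXISTS makes this safe to re-run)
psql "$DATABASE_URL" -c 'CREATE EXTENSION IF NOT EXISTS "pgcrypto";'

# Verify the extension is now listed
psql "$DATABASE_URL" -c '\dx pgcrypto'
```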
-#### 2. Google OAuth +### 3. Google OAuth - Optional - Go to [Google Cloud OAuth Console](https://console.cloud.google.com/auth/clients) - Create an OAuth client: @@ -181,7 +220,7 @@ Some environment variables may not load correctly from `.env` — in such cases, - Name: `comp_app` # You can choose a different name if you prefer! - Add these **Authorized Redirect URIs**: - ``` +``` http://localhost http://localhost:3000 http://localhost:3002 @@ -189,123 +228,49 @@ Some environment variables may not load correctly from `.env` — in such cases, http://localhost:3002/api/auth/callback/google http://localhost:3000/auth http://localhost:3002/auth - ``` +``` - After creating the app, copy the `GOOGLE_ID` and `GOOGLE_SECRET` - - Add them to your `.env` files - - If that doesn’t work, hard-code them in: - ``` - comp/apps/portal/src/app/lib/auth.ts - ``` + - Add them to your `.env` files as `AUTH_GOOGLE_ID` and `AUTH_GOOGLE_SECRET` -#### 3. Redis (Upstash) +### 4. Redis (Upstash) - Required - Go to [https://console.upstash.com](https://console.upstash.com) - Create a Redis database -- Copy the **Redis URL** and **TOKEN** -- Add them to your `.env` file, or hard-code them if the environment variables are not being recognized in: - ``` - comp/packages/kv/src/index.ts - ``` +- Copy the **TOKEN** and **Redis URL** +- Add them to your `.env` files as `UPSTASH_REDIS_REST_TOKEN` and `UPSTASH_REDIS_REST_URL` --- -### Database Setup - -Start and initialize the PostgreSQL database using Docker: - -1. Start the database: - - ```sh - bun docker:up - ``` +### 5. Database Setup - Required-ish (NOT required for Docker Compose setups) -2. Default credentials: - - Database name: `comp` - - Username: `postgres` - - Password: `postgres` +Start and initialize your own PostgreSQL database. Add the PostgreSQL connection URI to the `.env` files as `DATABASE_URL`, for example: DATABASE_URL="postgresql://[username[:password]@]host[:port]/database[?options]" -3. To change the default password: - - ```sql - ALTER USER postgres WITH PASSWORD 'new_password'; - ``` - -4. If you encounter the following error: - - ``` - HINT: No function matches the given name and argument types... - ``` - - Run the fix: - - ```sh - psql "postgresql://postgres:@localhost:5432/comp" -f ./packages/db/prisma/functionDefinition.sql - ``` - - Expected output: `CREATE FUNCTION` - - > 💡 `comp` is the database name. Make sure to use the correct **port** and **database name** for your setup. -5. Apply schema and seed: +1. Initialize schema and seed - automatic if using Docker Compose; additionally, Docker Compose installations will automatically apply any new migrations on every startup without harming your data. ```sh - # Generate Prisma client - bun db:generate + # Generate Prisma client (Build "Migrator" Templates) + bun db:generate - # Push the schema to the database - bun db:push + # Push the schema to your Postgres database + bun db:push - # Optional: Seed the database with initial data - bun db:seed + # CAUTION: Run only once! - Seed the database with initial data + bun db:seed ``` Other useful database commands: ```sh -# Open Prisma Studio to view/edit data -bun db:studio - -# Run database migrations -bun db:migrate + # Open Prisma Studio to view/edit data + bun db:studio -# Stop the database container -bun docker:down - -# Remove the database container and volume -bun docker:clean + # Run database migrations + bun db:migrate ``` --- -### Start Development - -Once everything is configured: - -```sh -bun run dev -``` - -Or use the Turbo repo script: - -```sh -turbo dev -``` - -> 💡 Make sure you have Turbo installed. If not, you can install it using Bun: - -```sh -bun add -g turbo -``` - -🎉 Yay! You now have a working local instance of Comp AI! 🚀 - -## Deployment - -### Docker - -Steps to deploy Comp AI on Docker are coming soon. - ### Vercel Steps to deploy Comp AI on Vercel are coming soon.
@@ -328,25 +293,25 @@ This repository uses semantic-release to automatically publish packages to npm w ### Usage ```bash -# Install a published package -npm install @comp/ui + # Install a published package + npm install @comp/ui -# Use in your project -import { Button } from '@comp/ui/button' -import { client } from '@comp/kv' + # Use in your project + import { Button } from '@comp/ui/button' + import { client } from '@comp/kv' ``` ### Development ```bash -# Build all packages -bun run build + # Build all packages + bun run build -# Build specific package -bun run -F @comp/ui build + # Build specific package + bun run -F @comp/ui build -# Test packages locally -bun run release:packages --dry-run + # Test packages locally + bun run release:packages --dry-run ``` ## Contributors diff --git a/apps/app/.env.example b/apps/app/.env.example index 883e586e4..ceaef850a 100644 --- a/apps/app/.env.example +++ b/apps/app/.env.example @@ -20,4 +20,4 @@ TRIGGER_SECRET_KEY="" # Secret key from Trigger.dev OPENAI_API_KEY="" # AI Chat + Auto Generated Policies, Risks + Vendors FIRECRAWL_API_KEY="" # For research, self-host or use cloud-version @ https://firecrawl.dev - +TRIGGER_PROJECT_ID="" # Project ID from Trigger.dev diff --git a/apps/app/README.md b/apps/app/README.md index e215bc4cc..0d2f091bf 100644 --- a/apps/app/README.md +++ b/apps/app/README.md @@ -2,6 +2,15 @@ This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next- ## Getting Started +### 1. Trigger.dev - Required + +- Create an account on [https://cloud.trigger.dev](https://cloud.trigger.dev) +- Create a project and copy the Project ID +- In `comp/apps/app/.env`, set: +```sh + TRIGGER_PROJECT_ID="proj_****az***ywb**ob*" +``` + First, run the development server: ```bash diff --git a/apps/app/next.config.ts b/apps/app/next.config.ts index 254702356..5ade9d333 100644 --- a/apps/app/next.config.ts +++ b/apps/app/next.config.ts @@ -6,6 +6,16 @@ import path from 'path'; import './src/env.mjs'; const isStandalone = process.env.NEXT_OUTPUT_STANDALONE === 'true'; +const isLocalEnvironment = (process.env.APP_ENVIRONMENT ?? '').toLowerCase() === 'local'; +const isSelfHosting = (() => { + const raw = process.env.SELF_HOSTING; + if (raw) { + const normalized = raw.toLowerCase(); + if (normalized === 'true' || normalized === '1') return true; + if (normalized === 'false' || normalized === '0') return false; + } + return isLocalEnvironment; +})(); const config: NextConfig = { // Ensure Turbopack can import .md files as raw strings during dev @@ -39,9 +49,13 @@ const config: NextConfig = { process.env.NODE_ENV === 'production' && process.env.STATIC_ASSETS_URL ? 
`${process.env.STATIC_ASSETS_URL}/app` : '', + env: { + NEXT_PUBLIC_DISABLE_IMAGE_OPTIMIZATION: String(isSelfHosting), + }, reactStrictMode: true, transpilePackages: ['@trycompai/db', '@prisma/client'], images: { + unoptimized: isSelfHosting, remotePatterns: [ { protocol: 'https', diff --git a/apps/app/src/actions/organization/lib/initialize-organization.ts b/apps/app/src/actions/organization/lib/initialize-organization.ts index 6eff295be..d4272512d 100644 --- a/apps/app/src/actions/organization/lib/initialize-organization.ts +++ b/apps/app/src/actions/organization/lib/initialize-organization.ts @@ -410,13 +410,19 @@ export const initializeOrganization = async ({ ); } - const result = await db.$transaction(async (tx) => { - return _upsertOrgFrameworkStructureCore({ - organizationId, - targetFrameworkEditorIds: frameworkIds, - frameworkEditorFrameworks: frameworksAndReqsToProcess, - tx, - }); - }); + const result = await db.$transaction( + async (tx) => { + return _upsertOrgFrameworkStructureCore({ + organizationId, + targetFrameworkEditorIds: frameworkIds, + frameworkEditorFrameworks: frameworksAndReqsToProcess, + tx, + }); + }, + { + timeout: 30_000, // higher than default to handle slower DBs without hanging too long + maxWait: 5_000, + }, + ); return result; }; diff --git a/apps/app/src/app/(app)/setup/go/[id]/components/onboarding-status.tsx b/apps/app/src/app/(app)/setup/go/[id]/components/onboarding-status.tsx index b8a84d19a..15db1aa8a 100644 --- a/apps/app/src/app/(app)/setup/go/[id]/components/onboarding-status.tsx +++ b/apps/app/src/app/(app)/setup/go/[id]/components/onboarding-status.tsx @@ -7,7 +7,7 @@ import { useRouter } from 'next/navigation'; import { useEffect } from 'react'; export function OnboardingStatus({ runId }: { runId: string }) { - const { run, error, isLoading } = useRun(runId, { + const { run, error, isLoading } = useRun(runId, { refreshInterval: 1000, }); diff --git a/apps/app/src/app/posthog.ts b/apps/app/src/app/posthog.ts index 4ecfbaf78..4b15d4931 100644 --- a/apps/app/src/app/posthog.ts +++ b/apps/app/src/app/posthog.ts @@ -12,6 +12,9 @@ function getPostHogClient(): PostHog | null { const apiKey = process.env.NEXT_PUBLIC_POSTHOG_KEY; const apiHost = process.env.NEXT_PUBLIC_POSTHOG_HOST; + const appEnvironment = process.env.APP_ENVIRONMENT?.toLowerCase(); + const suppressFlag = process.env.SUPPRESS_POSTHOG_WARNING?.toLowerCase(); + const suppressWarning = appEnvironment === 'local' || suppressFlag === 'true'; if (apiKey && apiHost) { posthogInstance = new PostHog(apiKey, { @@ -21,10 +24,12 @@ function getPostHogClient(): PostHog | null { return posthogInstance; } - // If keys are not set, warn and return null - console.warn( - 'PostHog keys (NEXT_PUBLIC_POSTHOG_KEY, NEXT_PUBLIC_POSTHOG_HOST) are not set, tracking is disabled.', - ); + // If keys are not set, optionally warn and return null + if (!suppressWarning) { + console.warn( + 'PostHog keys (NEXT_PUBLIC_POSTHOG_KEY, NEXT_PUBLIC_POSTHOG_HOST) are not set, tracking is disabled.', + ); + } return null; } diff --git a/apps/app/src/components/app-onboarding.tsx b/apps/app/src/components/app-onboarding.tsx index b7a174e54..2091e157c 100644 --- a/apps/app/src/components/app-onboarding.tsx +++ b/apps/app/src/components/app-onboarding.tsx @@ -41,6 +41,8 @@ export function AppOnboarding({ const [open, setOpen] = useQueryState(sheetName ?? 
'sheet'); const isOpen = Boolean(open); const { theme } = useTheme(); + const disableImageOptimization = + process.env.NEXT_PUBLIC_DISABLE_IMAGE_OPTIMIZATION === 'true'; return ( @@ -131,6 +133,7 @@ export function AppOnboarding({ height={400} width={400} quality={100} + unoptimized={disableImageOptimization} className="relative z-10 rounded-lg drop-shadow-md" /> diff --git a/apps/app/src/env.mjs b/apps/app/src/env.mjs index 522fe452e..3bb9892ad 100644 --- a/apps/app/src/env.mjs +++ b/apps/app/src/env.mjs @@ -1,6 +1,14 @@ import { createEnv } from '@t3-oss/env-nextjs'; import { z } from 'zod'; +const normalizedAppEnvironment = process.env.APP_ENVIRONMENT?.toLowerCase(); +const normalizedSuppressPosthogWarning = (() => { + const value = process.env.SUPPRESS_POSTHOG_WARNING; + if (!value) return undefined; + const lowered = value.toLowerCase(); + return lowered === 'true' || lowered === 'false' ? lowered : undefined; +})(); + export const env = createEnv({ server: { AUTH_GOOGLE_ID: z.string().optional(), @@ -26,7 +34,7 @@ export const env = createEnv({ APP_AWS_SECRET_ACCESS_KEY: z.string().optional(), APP_AWS_REGION: z.string().optional(), APP_AWS_BUCKET_NAME: z.string().optional(), - NEXT_PUBLIC_PORTAL_URL: z.string(), + NEXT_PUBLIC_PORTAL_URL: z.string().optional(), FIRECRAWL_API_KEY: z.string().optional(), FLEET_URL: z.string().optional(), FLEET_TOKEN: z.string().optional(), @@ -35,6 +43,11 @@ export const env = createEnv({ GA4_API_SECRET: z.string().optional(), GA4_MEASUREMENT_ID: z.string().optional(), LINKEDIN_CONVERSIONS_ACCESS_TOKEN: z.string().optional(), + TRIGGER_QUEUE_CONCURRENCY: z.coerce.number().int().positive().max(100).optional(), + APP_ENVIRONMENT: z + .enum(['local', 'development', 'staging', 'production']) + .optional(), + SUPPRESS_POSTHOG_WARNING: z.enum(['true', 'false']).optional(), NOVU_API_KEY: z.string().optional(), }, @@ -93,6 +106,9 @@ export const env = createEnv({ NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL: process.env.NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL, NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL, NEXT_PUBLIC_BETTER_AUTH_URL: process.env.NEXT_PUBLIC_BETTER_AUTH_URL, + TRIGGER_QUEUE_CONCURRENCY: process.env.TRIGGER_QUEUE_CONCURRENCY, + APP_ENVIRONMENT: normalizedAppEnvironment, + SUPPRESS_POSTHOG_WARNING: normalizedSuppressPosthogWarning, NOVU_API_KEY: process.env.NOVU_API_KEY, NEXT_PUBLIC_NOVU_APPLICATION_IDENTIFIER: process.env.NEXT_PUBLIC_NOVU_APPLICATION_IDENTIFIER, }, diff --git a/apps/app/src/jobs/tasks/automation/execute-script.ts b/apps/app/src/jobs/tasks/automation/execute-script.ts index a53e33413..cf931a51d 100644 --- a/apps/app/src/jobs/tasks/automation/execute-script.ts +++ b/apps/app/src/jobs/tasks/automation/execute-script.ts @@ -1,3 +1,4 @@ +import { env } from '@/env.mjs'; import { getModelOptions } from '@/ai/gateway'; import { decrypt, type EncryptedData } from '@/lib/encryption'; import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; @@ -11,7 +12,7 @@ import { z } from 'zod'; // Queue for automation execution const automationExecutionQueue = queue({ name: 'automation-execution', - concurrencyLimit: 10, + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 
10, }); interface ExecuteScriptPayload { diff --git a/apps/app/src/jobs/tasks/device/create-fleet-label-for-org.ts b/apps/app/src/jobs/tasks/device/create-fleet-label-for-org.ts index 73548ef27..7cff9f7c8 100644 --- a/apps/app/src/jobs/tasks/device/create-fleet-label-for-org.ts +++ b/apps/app/src/jobs/tasks/device/create-fleet-label-for-org.ts @@ -1,10 +1,14 @@ +import { env } from '@/env.mjs'; import { getFleetInstance } from '@/lib/fleet'; import { db } from '@db'; import { logger, queue, task } from '@trigger.dev/sdk'; import { AxiosError } from 'axios'; // Optional: define a queue if we want to control concurrency in v4 -const fleetQueue = queue({ name: 'create-fleet-label-for-org', concurrencyLimit: 10 }); +const fleetQueue = queue({ + name: 'create-fleet-label-for-org', + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 10, +}); export const createFleetLabelForOrg = task({ id: 'create-fleet-label-for-org', @@ -13,6 +17,13 @@ export const createFleetLabelForOrg = task({ maxAttempts: 3, }, run: async ({ organizationId }: { organizationId: string }) => { + if (!env.FLEET_URL) { + logger.warn('FLEET_URL is not configured; skipping Fleet label creation', { + organizationId, + }); + return; + } + const organization = await db.organization.findUnique({ where: { id: organizationId, diff --git a/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts b/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts index d719ebeb1..7c2aef5ad 100644 --- a/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts +++ b/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts @@ -1,4 +1,5 @@ import { RiskStatus, db } from '@db'; +import { env } from '@/env.mjs'; import { logger, queue, task } from '@trigger.dev/sdk'; import axios from 'axios'; import { @@ -8,8 +9,14 @@ import { } from './onboard-organization-helpers'; // Queues -const riskMitigationQueue = queue({ name: 'risk-mitigations', concurrencyLimit: 100 }); -const riskMitigationFanoutQueue = queue({ name: 'risk-mitigations-fanout', concurrencyLimit: 100 }); +const riskMitigationQueue = queue({ + name: 'risk-mitigations', + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 10, +}); +const riskMitigationFanoutQueue = queue({ + name: 'risk-mitigations-fanout', + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 10, +}); export const generateRiskMitigation = task({ id: 'generate-risk-mitigation', diff --git a/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts b/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts index 134f18691..1753842d8 100644 --- a/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts +++ b/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts @@ -1,4 +1,5 @@ import { VendorStatus, db } from '@db'; +import { env } from '@/env.mjs'; import { logger, queue, task } from '@trigger.dev/sdk'; import axios from 'axios'; import { @@ -8,10 +9,13 @@ import { } from './onboard-organization-helpers'; // Queues -const vendorMitigationQueue = queue({ name: 'vendor-risk-mitigations', concurrencyLimit: 100 }); +const vendorMitigationQueue = queue({ + name: 'vendor-risk-mitigations', + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 10, +}); const vendorMitigationFanoutQueue = queue({ name: 'vendor-risk-mitigations-fanout', - concurrencyLimit: 100, + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 
10, }); export const generateVendorMitigation = task({ diff --git a/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts b/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts index c6a3e656d..dd95006e1 100644 --- a/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts +++ b/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts @@ -1,4 +1,5 @@ import { db } from '@db'; +import { env } from '@/env.mjs'; import { logger, queue, task, tasks } from '@trigger.dev/sdk'; import axios from 'axios'; import { generateRiskMitigationsForOrg } from './generate-risk-mitigation'; @@ -11,7 +12,10 @@ import { } from './onboard-organization-helpers'; // v4 queues must be declared in advance -const onboardOrgQueue = queue({ name: 'onboard-organization', concurrencyLimit: 100 }); +const onboardOrgQueue = queue({ + name: 'onboard-organization', + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 10, +}); export const onboardOrganization = task({ id: 'onboard-organization', diff --git a/apps/app/src/jobs/tasks/onboarding/update-policy.ts b/apps/app/src/jobs/tasks/onboarding/update-policy.ts index 3cf6e55ce..9de7d5435 100644 --- a/apps/app/src/jobs/tasks/onboarding/update-policy.ts +++ b/apps/app/src/jobs/tasks/onboarding/update-policy.ts @@ -1,3 +1,4 @@ +import { env } from '@/env.mjs'; import { logger, queue, schemaTask } from '@trigger.dev/sdk'; import { z } from 'zod'; import { processPolicyUpdate } from './update-policies-helpers'; @@ -7,7 +8,10 @@ if (!process.env.OPENAI_API_KEY) { } // v4: define queue ahead of time -export const updatePolicyQueue = queue({ name: 'update-policy', concurrencyLimit: 100 }); +export const updatePolicyQueue = queue({ + name: 'update-policy', + concurrencyLimit: env.TRIGGER_QUEUE_CONCURRENCY ?? 10, +}); export const updatePolicy = schemaTask({ id: 'update-policy', diff --git a/apps/app/src/middleware.test.ts b/apps/app/src/middleware.test.ts index a4277a657..b0c7b03c1 100644 --- a/apps/app/src/middleware.test.ts +++ b/apps/app/src/middleware.test.ts @@ -110,8 +110,8 @@ describe('Middleware', () => { // Assert expect(mockAuth.api.setActiveOrganization).not.toHaveBeenCalled(); - // Since user has no org, they should be allowed to access setup - expect(response.status).toBe(200); + expect(response.status).toBe(307); + expect(response.headers.get('location')).toBe('http://localhost:3000/setup'); }); it('should allow existing users to create additional orgs with intent param', async () => { @@ -148,9 +148,9 @@ describe('Middleware', () => { // Act const response = await middleware(request); - // Assert - expect(response.status).toBe(307); - expect(response.headers.get('location')).toBe('http://localhost:3000/org_123/frameworks'); + // Assert - middleware now lets setup render client-side instead of redirecting + expect(response.status).toBe(200); + expect(response.headers.get('location')).toBeNull(); }); }); @@ -171,9 +171,10 @@ describe('Middleware', () => { // Act const response = await middleware(request); - // Assert - expect(response.status).toBe(307); - expect(response.headers.get('location')).toBe('http://localhost:3000/upgrade/org_123'); + // Assert - middleware no longer redirects to upgrade from middleware layer + expect(mockDb.organization.findFirst).not.toHaveBeenCalled(); + expect(response.status).toBe(200); + expect(response.headers.get('location')).toBeNull(); }); it('should allow access with hasAccess = true', async () => { @@ -195,6 +196,7 @@ describe('Middleware', () => { // Assert expect(response.status).toBe(200); + 
expect(response.headers.get('location')).toBeNull(); }); it('should bypass access check for unprotected routes', async () => { @@ -230,9 +232,10 @@ describe('Middleware', () => { // Act const response = await middleware(request); - // Assert - expect(response.status).toBe(307); - expect(response.headers.get('location')).toBe('http://localhost:3000/upgrade/org_123'); + // Assert - middleware does not perform upgrade redirects anymore + expect(mockDb.organization.findFirst).not.toHaveBeenCalled(); + expect(response.status).toBe(200); + expect(response.headers.get('location')).toBeNull(); }); it('should preserve query parameters when redirecting to upgrade', async () => { @@ -252,9 +255,10 @@ describe('Middleware', () => { const response = await middleware(request); // Assert - expect(response.status).toBe(307); + expect(mockDb.organization.findFirst).not.toHaveBeenCalled(); + expect(response.status).toBe(200); const location = response.headers.get('location'); - expect(location).toBe('http://localhost:3000/upgrade/org_123?redirect=policies&tab=active'); + expect(location).toBeNull(); }); }); @@ -275,12 +279,9 @@ describe('Middleware', () => { // Act const response = await middleware(request); - // Assert - expect(mockAuth.api.setActiveOrganization).toHaveBeenCalledWith({ - headers: expect.any(Object), - body: { organizationId: 'org_123' }, - }); - expect(response.status).toBe(307); // Redirect to refresh session + // Assert - middleware no longer heals sessions + expect(mockAuth.api.setActiveOrganization).not.toHaveBeenCalled(); + expect(response.status).toBe(200); }); }); @@ -306,9 +307,10 @@ describe('Middleware', () => { // Act const response = await middleware(request); - // Assert - expect(response.status).toBe(307); - expect(response.headers.get('location')).toBe('http://localhost:3000/onboarding/org_123'); + // Assert - onboarding gating handled downstream now + expect(mockDb.organization.findUnique).not.toHaveBeenCalled(); + expect(response.status).toBe(200); + expect(response.headers.get('location')).toBeNull(); }); it('should allow access to product when onboarding is completed', async () => { @@ -361,11 +363,10 @@ describe('Middleware', () => { const response = await middleware(request); // Assert - expect(response.status).toBe(307); + expect(mockDb.organization.findUnique).not.toHaveBeenCalled(); + expect(response.status).toBe(200); const location = response.headers.get('location'); - expect(location).toBe( - 'http://localhost:3000/onboarding/org_123?checkoutComplete=starter&value=99', - ); + expect(location).toBeNull(); }); it('should not check onboarding for unprotected routes', async () => { diff --git a/apps/app/src/test-utils/helpers/middleware.ts b/apps/app/src/test-utils/helpers/middleware.ts index 556b63342..41f51e562 100644 --- a/apps/app/src/test-utils/helpers/middleware.ts +++ b/apps/app/src/test-utils/helpers/middleware.ts @@ -1,5 +1,6 @@ import type { Session } from '@/utils/auth'; import { NextRequest, NextResponse } from 'next/server'; +import { getLatestSession } from '@/test-utils/mocks/auth'; interface MockRequestOptions { session?: Session | null; @@ -12,7 +13,7 @@ export async function createMockRequest( pathname: string, options: MockRequestOptions = {}, ): Promise<NextRequest> { - const { headers = {}, searchParams = {}, method = 'GET' } = options; + const { headers = {}, searchParams = {}, method = 'GET', session } = options; // Build URL with search params const url = new URL(pathname, 'http://localhost:3000'); @@ -28,6 +29,19 @@ export async function
createMockRequest( ...headers, }); + // Attach session token cookie when provided + const sessionToken = session?.token ?? getLatestSession()?.token; + + if (sessionToken) { + const existingCookies = headersInit.get('cookie'); + const sessionCookies = [ + `__Secure-better-auth.session_token=${sessionToken}`, + `better-auth.session_token=${sessionToken}`, + ]; + const cookieHeader = [existingCookies, sessionCookies.join('; ')].filter(Boolean).join('; '); + headersInit.set('cookie', cookieHeader); + } + // Create the request const request = new NextRequest(url, { method, diff --git a/apps/app/src/test-utils/mocks/auth.ts b/apps/app/src/test-utils/mocks/auth.ts index 3c8bc37b0..bbf5401b6 100644 --- a/apps/app/src/test-utils/mocks/auth.ts +++ b/apps/app/src/test-utils/mocks/auth.ts @@ -1,6 +1,8 @@ -import { Departments, type Member, type Session, type User } from '@db'; +import type { Member, Session, User } from '@db'; import { vi } from 'vitest'; +let latestSession: Session | null = null; + // Mock auth API structure export const mockAuthApi = { getSession: vi.fn(), @@ -59,7 +61,7 @@ export const createMockMember = (overrides?: Partial<Member>): Member => ({ organizationId: 'org_test123', role: 'owner', createdAt: new Date(), - department: Departments.none, + department: 'none' as Member['department'], isActive: true, fleetDmLabelId: null, ...overrides, @@ -82,6 +84,8 @@ export const setupAuthMocks = (options?: { }) : null); + latestSession = sessionData; + // Mock getSession to return the proper structure mockAuthApi.getSession.mockResolvedValue( sessionData && userData ? { session: sessionData, user: userData } : null, @@ -96,3 +100,5 @@ export const setupAuthMocks = (options?: { member: memberData, }; }; + +export const getLatestSession = () => latestSession; diff --git a/apps/app/trigger.config.ts b/apps/app/trigger.config.ts index 2841b3a7e..967309f0f 100644 --- a/apps/app/trigger.config.ts +++ b/apps/app/trigger.config.ts @@ -4,20 +4,31 @@ import { puppeteer } from '@trigger.dev/build/extensions/puppeteer'; import { defineConfig } from '@trigger.dev/sdk'; import { prismaExtension } from './customPrismaExtension'; +type BuildExtensionType = + | ReturnType<typeof prismaExtension> + | ReturnType<typeof puppeteer> + | ReturnType<typeof syncVercelEnvVars>; + +const buildExtensions: BuildExtensionType[] = [ + prismaExtension({ + version: '6.13.0', + dbPackageVersion: '^1.3.7', // Version of @trycompai/db package with compiled JS + }), + puppeteer(), +]; + +// Only enable Vercel sync when project is configured to avoid noise locally. +if (process.env.VERCEL_PROJECT_ID) { + buildExtensions.push(syncVercelEnvVars()); +} + export default defineConfig({ - project: 'proj_lhxjliiqgcdyqbgtucda', + project: process.env.TRIGGER_PROJECT_ID!, logLevel: 'log', instrumentations: [new PrismaInstrumentation()], maxDuration: 300, // 5 minutes build: { - extensions: [ - prismaExtension({ - version: '6.13.0', - dbPackageVersion: '^1.3.7', // Version of @trycompai/db package with compiled JS - }), - puppeteer(), - syncVercelEnvVars(), - ], + extensions: buildExtensions, }, retries: { enabledInDev: true, diff --git a/apps/portal/src/app/lib/auth.ts b/apps/portal/src/app/lib/auth.ts index 45b8fc073..d65a10a5c 100644 --- a/apps/portal/src/app/lib/auth.ts +++ b/apps/portal/src/app/lib/auth.ts @@ -12,9 +12,10 @@ export const auth = betterAuth({ provider: 'postgresql', }), advanced: { - // This will enable us to fall back to DB for ID generation. - // It's important so we can use custom IDs specified in Prisma Schema.
- generateId: false, + database: { + // Fallback to DB for ID generation to respect Prisma model IDs. + generateId: false, + }, }, trustedOrigins: ['http://localhost:3000', 'https://*.trycomp.ai'], secret: env.AUTH_SECRET!, diff --git a/bun.lock b/bun.lock index 6fc52fd2a..205270ce9 100644 --- a/bun.lock +++ b/bun.lock @@ -35,7 +35,7 @@ "@types/react-dom": "^19.1.1", "ai": "^5.0.0", "better-auth": "^1.2.8", - "concurrently": "^9.1.2", + "concurrently": "^9.2.1", "d3": "^7.9.0", "date-fns": "^4.1.0", "dayjs": "^1.11.13", diff --git a/docker-compose.yml b/docker-compose.yml index 686a667c2..c94558359 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,26 +5,53 @@ services: dockerfile: Dockerfile target: migrator env_file: - - packages/db/.env + - .env + restart: 'no' seeder: build: context: . dockerfile: Dockerfile target: migrator env_file: - - packages/db/.env - command: sh -lc "bunx prisma generate --schema=node_modules/@trycompai/db/dist/schema.prisma && bun packages/db/prisma/seed/seed.js" + - .env + command: >- + sh -lc "set -eu; + echo '[Seeder] Starting seed job'; + echo '[Seeder] Generating Prisma client'; + bunx prisma generate --schema=node_modules/@trycompai/db/dist/schema.prisma; + echo '[Seeder] Running seed script'; + bun packages/db/prisma/seed/seed.ts; + echo '[Seeder] Seed job finished'" + depends_on: + migrator: + condition: service_completed_successfully + environment: + FORCE_DATABASE_WIPE_AND_RESEED: ${FORCE_DATABASE_WIPE_AND_RESEED:-false} + restart: 'no' app: build: context: . dockerfile: Dockerfile target: app args: - NEXT_PUBLIC_BETTER_AUTH_URL: ${BETTER_AUTH_URL} + NEXT_PUBLIC_BETTER_AUTH_URL: ${NEXT_PUBLIC_BETTER_AUTH_URL:-http://localhost:3000} + NEXT_PUBLIC_PORTAL_URL: ${NEXT_PUBLIC_PORTAL_URL:-http://localhost:3002} + APP_ENVIRONMENT: ${APP_ENVIRONMENT:-local} + TRIGGER_PROJECT_ID: ${TRIGGER_PROJECT_ID} + secrets: + - trigger_env_file ports: - '3000:3000' env_file: - - apps/app/.env + - .env + environment: + APP_ENVIRONMENT: ${APP_ENVIRONMENT:-local} + TRIGGER_PROJECT_ID: ${TRIGGER_PROJECT_ID} + depends_on: + migrator: + condition: service_completed_successfully + seeder: + condition: service_completed_successfully healthcheck: test: ['CMD-SHELL', 'curl -f http://localhost:3000/api/health || exit 1'] interval: 30s @@ -38,13 +65,25 @@ services: dockerfile: Dockerfile target: portal args: - NEXT_PUBLIC_BETTER_AUTH_URL: ${BETTER_AUTH_URL_PORTAL} + BETTER_AUTH_URL: ${BETTER_AUTH_URL:-http://localhost:3002} ports: - '3002:3000' env_file: - - apps/portal/.env + - .env + environment: + APP_ENVIRONMENT: ${APP_ENVIRONMENT:-local} + BETTER_AUTH_URL: ${BETTER_AUTH_URL:-http://localhost:3002} + depends_on: + migrator: + condition: service_completed_successfully + seeder: + condition: service_completed_successfully healthcheck: test: ['CMD-SHELL', 'curl -f http://localhost:3002/ || exit 1'] interval: 30s timeout: 10s retries: 3 + +secrets: + trigger_env_file: + file: .env diff --git a/package.json b/package.json index 60ccb44bd..a64863776 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,7 @@ "@types/react-dom": "^19.1.1", "ai": "^5.0.0", "better-auth": "^1.2.8", - "concurrently": "^9.1.2", + "concurrently": "^9.2.1", "d3": "^7.9.0", "date-fns": "^4.1.0", "dayjs": "^1.11.13", diff --git a/packages/db/prisma/seed/seed.ts b/packages/db/prisma/seed/seed.ts index 12f0d3045..3a8fd0924 100644 --- a/packages/db/prisma/seed/seed.ts +++ b/packages/db/prisma/seed/seed.ts @@ -5,11 +5,68 @@ import { frameworkEditorModelSchemas } from './frameworkEditorSchemas'; const 
prisma = new PrismaClient(); +const forceDatabaseWipeAndReseed = + process.env.FORCE_DATABASE_WIPE_AND_RESEED?.toLowerCase() === 'true'; + +async function shouldSkipSeeding(): Promise<boolean> { + try { + const count = await prisma.frameworkEditorFramework.count(); + + if (count > 0) { + if (!forceDatabaseWipeAndReseed) { + console.log( + `[Seeder] Seed data already present (FrameworkEditorFramework count: ${count}). Skipping seed.`, + ); + await prisma.$disconnect(); + return true; + } + + console.log( + `[Seeder] FORCE_DATABASE_WIPE_AND_RESEED=true detected. Wiping ${count} existing FrameworkEditor frameworks before reseeding.`, + ); + return false; + } + + if (forceDatabaseWipeAndReseed) { + console.log('[Seeder] FORCE_DATABASE_WIPE_AND_RESEED=true detected but no existing data found. Proceeding with seed.'); + } + + return false; + } catch (error) { + console.warn('[Seeder] Seed pre-check failed; proceeding with seeding anyway.', error); + return false; + } +} + +async function wipeSeededData() { + console.log('[Seeder] Clearing existing Framework Editor seed data...'); + + const joinTables = [ + '_FrameworkEditorControlTemplateToFrameworkEditorPolicyTemplate', + '_FrameworkEditorControlTemplateToFrameworkEditorRequirement', + '_FrameworkEditorControlTemplateToFrameworkEditorTaskTemplate', + ]; + + for (const table of joinTables) { + await prisma.$executeRawUnsafe(`DELETE FROM "${table}";`); + } + + await prisma.frameworkEditorControlTemplate.deleteMany({}); + await prisma.frameworkEditorPolicyTemplate.deleteMany({}); + await prisma.frameworkEditorTaskTemplate.deleteMany({}); + await prisma.frameworkEditorRequirement.deleteMany({}); + await prisma.frameworkEditorFramework.deleteMany({}); + await prisma.frameworkEditorVideo.deleteMany({}); + + console.log('[Seeder] Existing Framework Editor seed data cleared.'); +} + async function seedJsonFiles(subDirectory: string) { const directoryPath = path.join(__dirname, subDirectory); - console.log(`Starting to seed files from: ${directoryPath}`); + console.log(`[Seeder] Starting to seed files from: ${directoryPath}`); const files = await fs.readdir(directoryPath); const jsonFiles = files.filter((file) => file.endsWith('.json')); + console.log(`[Seeder] Found ${jsonFiles.length} JSON file(s) in ${subDirectory}`); // Ensure deterministic order for primitives so FK dependencies are satisfied // Specifically, seed Frameworks before Requirements (which reference Frameworks) @@ -22,7 +79,10 @@ async function seedJsonFiles(subDirectory: string) { jsonFiles.sort((a, b) => getPriority(a) - getPriority(b)); } - for (const jsonFile of jsonFiles) { + for (const [index, jsonFile] of jsonFiles.entries()) { + console.log( + `[Seeder] -> Processing ${jsonFile} (${index + 1}/${jsonFiles.length}) from ${subDirectory}`, + ); try { const filePath = path.join(directoryPath, jsonFile); const jsonContent = await fs.readFile(filePath, 'utf-8'); @@ -86,7 +146,7 @@ async function seedJsonFiles(subDirectory: string) { }); console.log( - `Seeding ${processedData.length} records from ${jsonFile} into ${prismaModelKey}...`, + `[Seeder] Seeding ${processedData.length} records from ${jsonFile} into ${prismaModelKey}...`, ); // Use upsert to update existing records instead of skipping them @@ -98,7 +158,7 @@ }); } - console.log(`Finished seeding ${jsonFile} from primitives.`); + console.log(`[Seeder] Finished seeding ${jsonFile} from primitives.`); } else if (subDirectory === 'relations') { // Expected filename format:
_ModelAToModelB.json if (!jsonFile.startsWith('_') || !jsonFile.includes('To')) { @@ -149,7 +209,7 @@ async function seedJsonFiles(subDirectory: string) { } console.log( - `Processing relations from ${jsonFile} for ${prismaModelAName} to connect via ${relationFieldNameOnModelA}...`, + `[Seeder] Processing relations from ${jsonFile} for ${prismaModelAName} to connect via ${relationFieldNameOnModelA}...`, ); let connectionsMade = 0; for (const relationItem of jsonData) { @@ -178,23 +238,36 @@ async function seedJsonFiles(subDirectory: string) { // Decide if one error should stop the whole process for this file or continue } } - console.log(`Finished processing ${jsonFile}. Made ${connectionsMade} connections.`); + console.log(`[Seeder] Finished processing ${jsonFile}. Made ${connectionsMade} connections.`); } } catch (error) { console.error(`Error processing ${jsonFile}:`, error); throw error; } + console.log(`[Seeder] <- Completed ${jsonFile}`); } + + console.log(`[Seeder] Completed directory ${subDirectory}`); } async function main() { + if (await shouldSkipSeeding()) { + return; + } + try { + if (forceDatabaseWipeAndReseed) { + await wipeSeededData(); + } + + console.log('[Seeder] Beginning primitives seed pass'); await seedJsonFiles('primitives'); + console.log('[Seeder] Beginning relations seed pass'); await seedJsonFiles('relations'); await prisma.$disconnect(); - console.log('Seeding completed successfully for primitives and relations.'); + console.log('[Seeder] Seeding completed successfully for primitives and relations.'); } catch (error: unknown) { - console.error('Seeding failed:', error); + console.error('[Seeder] Seeding failed:', error); await prisma.$disconnect(); process.exit(1); } diff --git a/packages/ui/src/components/editor/utils/validate-content.ts b/packages/ui/src/components/editor/utils/validate-content.ts index 88dc9109f..6a6d6e598 100644 --- a/packages/ui/src/components/editor/utils/validate-content.ts +++ b/packages/ui/src/components/editor/utils/validate-content.ts @@ -58,13 +58,12 @@ function fixContentArray(contentArray: any[]): JSONContent[] { return fixedContent; } -function ensureNonEmptyText(value: unknown): string { +function ensureNonEmptyText(value: unknown): string | null { const text = typeof value === 'string' ? value : ''; - // Normalize NBSP and narrow no-break space for emptiness checks - const normalized = text.replace(/[\u00A0\u202F]/g, ''); + // Normalize NBSP, narrow no-break space, and zero-width space for emptiness checks + const normalized = text.replace(/[\u00A0\u202F\u200B]/g, ''); if (normalized.trim().length > 0) return text; - // Return zero-width space to ensure non-empty text node without visual change - return '\u200B'; + return null; } /** @@ -133,9 +132,13 @@ function fixParagraph(node: any): JSONContent { .map((item: any) => { // Fix text nodes that are missing the type property if (item.text && !item.type) { + const cleaned = ensureNonEmptyText(item.text); + if (!cleaned) { + return null; + } return { type: 'text', - text: ensureNonEmptyText(item.text), + text: cleaned, ...(item.marks && { marks: fixMarks(item.marks) }), }; } @@ -210,10 +213,13 @@ function fixListItem(node: any): JSONContent { /** * Fixes text nodes */ -function fixTextNode(node: any): JSONContent { +function fixTextNode(node: any): JSONContent | null { const { text, marks, ...rest } = node; const value = ensureNonEmptyText(text); + if (!value) { + return null; + } return { type: 'text', text: value,