diff --git a/apps/api/Dockerfile b/apps/api/Dockerfile index 1f0c0454f..e2a42ce41 100644 --- a/apps/api/Dockerfile +++ b/apps/api/Dockerfile @@ -13,7 +13,7 @@ COPY package.json ./ # Note: workspace:* dependencies will be skipped and copied manually below RUN curl -fsSL https://bun.sh/install | bash \ && export PATH="/root/.bun/bin:$PATH" \ - && bun install --production --ignore-scripts || true + && bun install --production --ignore-scripts COPY node_modules/@trycompai ./node_modules/@trycompai COPY node_modules/@prisma ./node_modules/@prisma diff --git a/apps/api/buildspec.yml b/apps/api/buildspec.yml index 88c2bb282..7a8b8c257 100644 --- a/apps/api/buildspec.yml +++ b/apps/api/buildspec.yml @@ -32,11 +32,6 @@ phases: - echo "Installing API dependencies only..." - bun install --filter=@comp/api --frozen-lockfile || bun install --filter=@comp/api --ignore-scripts || bun install --ignore-scripts - - echo "Building @trycompai/email package..." - - cd packages/email - - bun run build - - cd ../.. - - echo "Building NestJS application..." - echo "APP_NAME is set to $APP_NAME" - echo "Current directory $(pwd)" @@ -60,13 +55,14 @@ phases: - '[ -f "../docker-build/src/main.js" ] || { echo "❌ main.js not found in docker-build/src"; exit 1; }' - mkdir -p ../docker-build/node_modules/@trycompai - - mkdir -p ../docker-build/node_modules/@trycompai/email - - cp -r ../../packages/email/dist ../docker-build/node_modules/@trycompai/email/ - - cp ../../packages/email/package.json ../docker-build/node_modules/@trycompai/email/ - mkdir -p ../docker-build/node_modules/@trycompai/utils + - mkdir -p ../docker-build/node_modules/@trycompai/db - cp -r ../../packages/utils/src ../docker-build/node_modules/@trycompai/utils/ - cp ../../packages/utils/package.json ../docker-build/node_modules/@trycompai/utils/ + - cp -r ../../packages/db/dist ../docker-build/node_modules/@trycompai/db/ + - cp ../../packages/db/package.json ../docker-build/node_modules/@trycompai/db/ + - cp -r ../../node_modules/@prisma ../docker-build/node_modules/@prisma - cp -r ../../node_modules/.prisma ../docker-build/node_modules/.prisma diff --git a/apps/api/package.json b/apps/api/package.json index 4bed55244..a8e73dff6 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -12,8 +12,8 @@ "@nestjs/platform-express": "^11.1.5", "@nestjs/swagger": "^11.2.0", "@prisma/client": "^6.13.0", + "@react-email/components": "^0.0.41", "@trycompai/db": "^1.3.17", - "@trycompai/email": "workspace:*", "archiver": "^7.0.1", "axios": "^1.12.2", "better-auth": "^1.3.27", @@ -25,6 +25,8 @@ "nanoid": "^5.1.6", "pdf-lib": "^1.17.1", "prisma": "^6.13.0", + "react": "^19.1.1", + "react-dom": "^19.1.0", "reflect-metadata": "^0.2.2", "resend": "^6.4.2", "rxjs": "^7.8.1", diff --git a/apps/api/src/device-agent/device-agent.controller.ts b/apps/api/src/device-agent/device-agent.controller.ts index 3323b03cf..1b54c1ef6 100644 --- a/apps/api/src/device-agent/device-agent.controller.ts +++ b/apps/api/src/device-agent/device-agent.controller.ts @@ -71,14 +71,8 @@ export class DeviceAgentController { @AuthContext() authContext: AuthContextType, @Response({ passthrough: true }) res: ExpressResponse, ) { - // Use the authenticated user's ID as the employee ID - const employeeId = authContext.userId || 'unknown-user'; - const { stream, filename, contentType } = - await this.deviceAgentService.downloadWindowsAgent( - organizationId, - employeeId, - ); + await this.deviceAgentService.downloadWindowsAgent(); // Set headers for file download res.set({ diff --git 
a/apps/api/src/device-agent/device-agent.service.ts b/apps/api/src/device-agent/device-agent.service.ts index cfb1e411a..33cbe9d70 100644 --- a/apps/api/src/device-agent/device-agent.service.ts +++ b/apps/api/src/device-agent/device-agent.service.ts @@ -1,13 +1,6 @@ import { Injectable, NotFoundException, Logger } from '@nestjs/common'; import { S3Client, GetObjectCommand } from '@aws-sdk/client-s3'; -import { Readable, PassThrough } from 'stream'; -import archiver from 'archiver'; -import { generateWindowsScript } from './scripts/windows'; -import { - getPackageFilename, - getReadmeContent, - getScriptFilename, -} from './scripts/common'; +import { Readable } from 'stream'; @Injectable() export class DeviceAgentService { @@ -73,56 +66,12 @@ export class DeviceAgentService { } } - async downloadWindowsAgent( - organizationId: string, - employeeId: string, - ): Promise<{ stream: Readable; filename: string; contentType: string }> { + async downloadWindowsAgent(): Promise<{ stream: Readable; filename: string; contentType: string }> { try { - this.logger.log( - `Creating Windows agent zip for org ${organizationId}, employee ${employeeId}`, - ); - - // Hardcoded device marker paths used by the setup scripts - const fleetDevicePathWindows = 'C:\\ProgramData\\CompAI\\Fleet'; - - // Generate the Windows setup script - const script = generateWindowsScript({ - orgId: organizationId, - employeeId: employeeId, - fleetDevicePath: fleetDevicePathWindows, - }); - - // Create a passthrough stream for the response - const passThrough = new PassThrough(); - const archive = archiver('zip', { zlib: { level: 9 } }); - - // Pipe archive to passthrough - archive.pipe(passThrough); - - // Error handling for the archive - archive.on('error', (err) => { - this.logger.error('Archive error:', err); - passThrough.destroy(err); - }); - - archive.on('warning', (warn) => { - this.logger.warn('Archive warning:', warn); - }); - - // Add script file - const scriptFilename = getScriptFilename('windows'); - archive.append(script, { name: scriptFilename, mode: 0o755 }); - - // Add README - const readmeContent = getReadmeContent('windows'); - archive.append(readmeContent, { name: 'README.txt' }); - - // Get MSI package from S3 and stream it into the zip - const windowsPackageFilename = 'fleet-osquery.msi'; + const windowsPackageFilename = 'Comp AI Agent 1.0.0.exe'; const packageKey = `windows/${windowsPackageFilename}`; - const packageFilename = getPackageFilename('windows'); - this.logger.log(`Downloading Windows MSI from S3: ${packageKey}`); + this.logger.log(`Downloading Windows agent from S3: ${packageKey}`); const getObjectCommand = new GetObjectCommand({ Bucket: this.fleetBucketName, @@ -131,31 +80,27 @@ export class DeviceAgentService { const s3Response = await this.s3Client.send(getObjectCommand); - if (s3Response.Body) { - const s3Stream = s3Response.Body as Readable; - s3Stream.on('error', (err) => { - this.logger.error('S3 stream error:', err); - passThrough.destroy(err); - }); - archive.append(s3Stream, { name: packageFilename, store: true }); - } else { - this.logger.warn( - 'Windows MSI file not found in S3, creating zip without MSI', - ); + if (!s3Response.Body) { + throw new NotFoundException('Windows agent executable file not found in S3'); } - // Finalize the archive - archive.finalize(); + // Use S3 stream directly as Node.js Readable + const s3Stream = s3Response.Body as Readable; - this.logger.log('Successfully created Windows agent zip'); + this.logger.log( + `Successfully retrieved Windows agent: 
${windowsPackageFilename}`, + ); return { - stream: passThrough, - filename: `compai-device-agent-windows.zip`, - contentType: 'application/zip', + stream: s3Stream, + filename: windowsPackageFilename, + contentType: 'application/octet-stream', }; } catch (error) { - this.logger.error('Failed to create Windows agent zip:', error); + if (error instanceof NotFoundException) { + throw error; + } + this.logger.error('Failed to download Windows agent from S3:', error); throw error; } } diff --git a/apps/api/src/device-agent/scripts/common.ts b/apps/api/src/device-agent/scripts/common.ts index a243f69fb..99bbbe283 100644 --- a/apps/api/src/device-agent/scripts/common.ts +++ b/apps/api/src/device-agent/scripts/common.ts @@ -7,38 +7,3 @@ export function getScriptFilename(os: SupportedOS): string { export function getPackageFilename(os: SupportedOS): string { return os === 'macos' ? 'compai-device-agent.pkg' : 'compai-device-agent.msi'; } - -export function getReadmeContent(os: SupportedOS): string { - if (os === 'macos') { - return `Installation Instructions for macOS: - -1. First, run the setup script by double-clicking "run_me_first.command" - - This will create the necessary organization markers for device management - - You may need to allow the script to run in System Preferences > Security & Privacy - -2. Then, install the agent by double-clicking "compai-device-agent.pkg" - - Follow the installation wizard - - You may need to allow the installer in System Preferences > Security & Privacy - -3. The agent will start automatically after installation -`; - } - - return `Installation Instructions for Windows: - -1. First, run the setup script: - - Right-click on "run_me_first.bat" and select "Run as administrator" (required) - - This writes organization markers to the device and registry - - If prompted by SmartScreen, click "More info" -> "Run anyway" - -2. Then, install the agent: - - Double-click "compai-device-agent.msi" and follow the wizard - -3. Troubleshooting: - - If setup fails, open the log at: %ProgramData%\\CompAI\\Fleet or %Public%\\CompAI\\Fleet -> setup.log - - Ensure your antivirus or endpoint protection allows running local .bat files - - If you cannot run as administrator, ask IT to assist or install both files and registry keys manually - -4. After installation, the agent will start automatically. 
-`; -} diff --git a/apps/api/src/device-agent/scripts/windows.ts b/apps/api/src/device-agent/scripts/windows.ts deleted file mode 100644 index f0985c0c2..000000000 --- a/apps/api/src/device-agent/scripts/windows.ts +++ /dev/null @@ -1,222 +0,0 @@ -import type { ScriptConfig } from './types'; - -export function generateWindowsScript(config: ScriptConfig): string { - const { orgId, employeeId, fleetDevicePath } = config; - - const script = `@echo off -title CompAI Device Setup -setlocal EnableExtensions EnableDelayedExpansion -color 0A - -REM ========================= -REM Variables -REM ========================= -set "ORG_ID=${orgId}" -set "EMPLOYEE_ID=${employeeId}" -set "PRIMARY_DIR=${fleetDevicePath}" -set "FALLBACK_DIR=C:\\Users\\Public\\CompAI\\Fleet" -set "CHOSEN_DIR=" -set "LOG_FILE=" -set "HAS_ERROR=0" -set "ERRORS=" -set "EXIT_CODE=0" -REM newline token (exactly this 2-line shape) -set "nl=^ -" - -REM --- bootstrap log (updated once CHOSEN_DIR is known) --- -set "LOG_FILE=%~dp0setup.log" - -goto :main - -REM ======================================================= -REM Subroutines (placed AFTER main to avoid early execution) -REM ======================================================= -:log_msg -setlocal EnableDelayedExpansion -set "msg=%~1" -echo [%date% %time%] !msg! ->>"%LOG_FILE%" echo [%date% %time%] !msg! -endlocal & exit /b 0 - -:log_run -setlocal EnableDelayedExpansion -set "cmdline=%*" -echo [%date% %time%] CMD: !cmdline! ->>"%LOG_FILE%" echo [%date% %time%] CMD: !cmdline! -%* -set "rc=!errorlevel!" -if not "!rc!"=="0" ( - echo [%date% %time%] ERR !rc!: !cmdline! - >>"%LOG_FILE%" echo [%date% %time%] ERR !rc!: !cmdline! -) -endlocal & set "LAST_RC=%rc%" -exit /b %LAST_RC% - -REM ========================= -REM Main -REM ========================= -:main -call :log_msg "Script starting" - -REM Admin check -whoami /groups | find "S-1-16-12288" >nul 2>&1 -if errorlevel 1 ( - color 0E - echo This script must be run as Administrator. - echo Please right-click the file and select "Run as administrator". - echo. - echo Press any key to exit, then try again with Administrator privileges. - pause - exit /b 5 -) - -REM Relaunch persistent window -if not "%PERSIST%"=="1" ( - set "PERSIST=1" - call :log_msg "Re-launching in a persistent window" - start "CompAI Device Setup" cmd /k "%~f0 %*" - exit /b -) - -call :log_msg "Running with administrator privileges" -call :log_msg "Current directory: %cd%" -call :log_msg "Script path: %~f0" -call :log_msg "Switching working directory to script folder" -cd /d "%~dp0" -call :log_msg "New current directory: %cd%" -echo. - -REM Choose writable directory -call :log_msg "Choosing destination directory; primary=%PRIMARY_DIR% fallback=%FALLBACK_DIR%" -if exist "%PRIMARY_DIR%\\*" set "CHOSEN_DIR=%PRIMARY_DIR%" -if not defined CHOSEN_DIR call :log_run mkdir "%PRIMARY_DIR%" -if not defined CHOSEN_DIR if exist "%PRIMARY_DIR%\\*" set "CHOSEN_DIR=%PRIMARY_DIR%" - -if not defined CHOSEN_DIR call :log_msg "Primary not available; trying fallback" -if not defined CHOSEN_DIR if exist "%FALLBACK_DIR%\\*" set "CHOSEN_DIR=%FALLBACK_DIR%" -if not defined CHOSEN_DIR call :log_run mkdir "%FALLBACK_DIR%" -if not defined CHOSEN_DIR if exist "%FALLBACK_DIR%\\*" set "CHOSEN_DIR=%FALLBACK_DIR%" - -if not defined CHOSEN_DIR ( - color 0E - call :log_msg "WARNING: No writable directory found" - echo Primary attempted: "%PRIMARY_DIR%" - echo Fallback attempted: "%FALLBACK_DIR%" - echo [%date% %time%] No writable directory found. 
Primary: %PRIMARY_DIR%, Fallback: %FALLBACK_DIR% >> "%~dp0setup.log" - set "LOG_FILE=%~dp0setup.log" - set "HAS_ERROR=1" - set "ERRORS=!ERRORS!- No writable directory found (Primary: %PRIMARY_DIR%, Fallback: %FALLBACK_DIR%).!nl!" - set "EXIT_CODE=1" -) else ( - set "MARKER_DIR=%CHOSEN_DIR%" - if not "!MARKER_DIR:~-1!"=="\\" set "MARKER_DIR=!MARKER_DIR!\\" - - REM switch the log file to the chosen directory, carry over bootstrap logs - set "FINAL_LOG=!MARKER_DIR!setup.log" - if /i not "%LOG_FILE%"=="%FINAL_LOG%" ( - call :log_msg "Switching log to !FINAL_LOG!" - if exist "%LOG_FILE%" type "%LOG_FILE%" >> "!FINAL_LOG!" & del "%LOG_FILE%" - set "LOG_FILE=!FINAL_LOG!" - ) - call :log_msg "Using directory: !MARKER_DIR!" -) -echo Logs will be written to: !LOG_FILE! -echo. - -REM Write marker files -if defined CHOSEN_DIR ( - call :log_msg "Writing organization marker file" - call :log_msg "Preparing to write org marker to !MARKER_DIR!!ORG_ID!" - call :log_run cmd /c "(echo %ORG_ID%) > \"!MARKER_DIR!!ORG_ID!\"" - if errorlevel 1 ( - color 0E - call :log_msg "WARNING: Failed writing organization marker file to !MARKER_DIR!" - echo [%date% %time%] Failed writing org marker file >> "%LOG_FILE%" - set "HAS_ERROR=1" - set "ERRORS=!ERRORS!- Failed writing organization marker file.!nl!" - set "EXIT_CODE=1" - ) else ( - call :log_msg "[OK] Organization marker file: !MARKER_DIR!!ORG_ID!" - ) - - call :log_msg "Writing employee marker file" - call :log_msg "Preparing to write employee marker to !MARKER_DIR!!EMPLOYEE_ID!" - call :log_run cmd /c "(echo %EMPLOYEE_ID%) > \"!MARKER_DIR!!EMPLOYEE_ID!\"" - if errorlevel 1 ( - color 0E - call :log_msg "WARNING: Failed writing employee marker file to !MARKER_DIR!" - echo [%date% %time%] Failed writing employee marker file >> "%LOG_FILE%" - set "HAS_ERROR=1" - set "ERRORS=!ERRORS!- Failed writing employee marker file.!nl!" - set "EXIT_CODE=1" - ) else ( - call :log_msg "[OK] Employee marker file: !MARKER_DIR!!EMPLOYEE_ID!" - ) -) - -REM Permissions -if defined CHOSEN_DIR ( - call :log_msg "Setting permissions on marker directory" - call :log_run icacls "!MARKER_DIR!" /inheritance:e - - call :log_msg "Granting read to SYSTEM and Administrators on org marker" - call :log_run icacls "!MARKER_DIR!!ORG_ID!" /grant *S-1-5-18:R *S-1-5-32-544:R - - call :log_msg "Granting read to SYSTEM and Administrators on employee marker" - call :log_run icacls "!MARKER_DIR!!EMPLOYEE_ID!" /grant *S-1-5-18:R *S-1-5-32-544:R -) - -REM Verify -echo. -echo Verifying markers... -if defined CHOSEN_DIR ( - call :log_msg "Verifying marker exists: !MARKER_DIR!!EMPLOYEE_ID!" - if not exist "!MARKER_DIR!!EMPLOYEE_ID!" ( - color 0E - call :log_msg "WARNING: Employee marker file missing at !MARKER_DIR!!EMPLOYEE_ID!" - echo [%date% %time%] Verification failed: employee marker file missing >> "!LOG_FILE!" - set "HAS_ERROR=1" - set "ERRORS=!ERRORS!- Employee marker file missing at !MARKER_DIR!!EMPLOYEE_ID!!.!nl!" - set "EXIT_CODE=2" - ) else ( - call :log_msg "[OK] Employee marker file present: !MARKER_DIR!!EMPLOYEE_ID!" - ) -) -rem Skipping registry checks per request - -REM Result / Exit -echo. -echo ------------------------------------------------------------ -if "%HAS_ERROR%"=="0" ( - color 0A - echo RESULT: SUCCESS - echo Setup completed successfully for %EMPLOYEE_ID%. - if defined CHOSEN_DIR echo Files created in: !CHOSEN_DIR! - echo Log file: !LOG_FILE! 
- call :log_msg "RESULT: SUCCESS" -) else ( - color 0C - echo RESULT: COMPLETED WITH ISSUES - echo One or more steps did not complete successfully. Details: - echo. - echo !ERRORS! - echo. - echo Next steps: - echo - Take a screenshot of this window. - echo - Attach the log file from: !LOG_FILE! - echo - Share both with your CompAI support contact. - call :log_msg "RESULT: COMPLETED WITH ISSUES (exit=%EXIT_CODE%)" -) -echo ------------------------------------------------------------ -echo. -echo Press any key to close this window. This will not affect installation. -pause -if "%HAS_ERROR%"=="0" (exit /b 0) else (exit /b %EXIT_CODE%) - -REM End of main -goto :eof -`; - - return script.replace(/\n/g, '\r\n'); -} diff --git a/apps/api/src/email/components/footer.tsx b/apps/api/src/email/components/footer.tsx new file mode 100644 index 000000000..33baadd28 --- /dev/null +++ b/apps/api/src/email/components/footer.tsx @@ -0,0 +1,18 @@ +import { Hr, Link, Section, Text } from '@react-email/components'; + +export function Footer() { + return ( +
+
+ + + AI that handles compliance for you -{' '} + Comp AI. + + + + Comp AI | 2261 Market Street, San Francisco, CA 94114 + +
+  );
+}
diff --git a/apps/api/src/email/components/logo.tsx b/apps/api/src/email/components/logo.tsx
new file mode 100644
index 000000000..6c6885038
--- /dev/null
+++ b/apps/api/src/email/components/logo.tsx
@@ -0,0 +1,15 @@
+import { Img, Section } from '@react-email/components';
+
+export function Logo() {
+  return (
+
+ Comp AI +
+ ); +} diff --git a/apps/api/src/email/resend.ts b/apps/api/src/email/resend.ts new file mode 100644 index 000000000..b16b232be --- /dev/null +++ b/apps/api/src/email/resend.ts @@ -0,0 +1,82 @@ +import { Resend } from 'resend'; +import * as React from 'react'; + +export const resend = process.env.RESEND_API_KEY + ? new Resend(process.env.RESEND_API_KEY) + : null; + +export const sendEmail = async ({ + to, + subject, + react, + marketing, + system, + test, + cc, + scheduledAt, +}: { + to: string; + subject: string; + react: React.ReactNode; + marketing?: boolean; + system?: boolean; + test?: boolean; + cc?: string | string[]; + scheduledAt?: string; +}) => { + if (!resend) { + throw new Error('Resend not initialized - missing API key'); + } + + // 1) Pull each env var into its own constant + const fromMarketing = process.env.RESEND_FROM_MARKETING; + const fromSystem = process.env.RESEND_FROM_SYSTEM; + const fromDefault = process.env.RESEND_FROM_DEFAULT; + const toTest = process.env.RESEND_TO_TEST; + const replyMarketing = process.env.RESEND_REPLY_TO_MARKETING; + + // 2) Decide which one you need for this email + const fromAddress = marketing + ? fromMarketing + : system + ? fromSystem + : fromDefault; + + const toAddress = test ? toTest : to; + + const replyTo = marketing ? replyMarketing : undefined; + + // 3) Guard against undefined + if (!fromAddress) { + throw new Error('Missing FROM address in environment variables'); + } + if (!toAddress) { + throw new Error('Missing TO address in environment variables'); + } + + try { + const { data, error } = await resend.emails.send({ + from: fromAddress, // now always a string + to: toAddress, // now always a string + cc, + replyTo, + subject, + // @ts-ignore – React node allowed by the SDK + react, + scheduledAt, + }); + + if (error) { + console.error('Resend API error:', error); + throw new Error(`Failed to send email: ${error.message}`); + } + + return { + message: 'Email sent successfully', + id: data?.id, + }; + } catch (error) { + console.error('Email sending error:', error); + throw error instanceof Error ? error : new Error('Failed to send email'); + } +}; diff --git a/packages/email/emails/access-granted.tsx b/apps/api/src/email/templates/access-granted.tsx similarity index 86% rename from packages/email/emails/access-granted.tsx rename to apps/api/src/email/templates/access-granted.tsx index eae0b5f94..3678edfb2 100644 --- a/packages/email/emails/access-granted.tsx +++ b/apps/api/src/email/templates/access-granted.tsx @@ -20,7 +20,12 @@ interface Props { portalUrl?: string | null; } -export const AccessGrantedEmail = ({ toName, organizationName, expiresAt, portalUrl }: Props) => { +export const AccessGrantedEmail = ({ + toName, + organizationName, + expiresAt, + portalUrl, +}: Props) => { return ( @@ -50,11 +55,14 @@ export const AccessGrantedEmail = ({ toName, organizationName, expiresAt, portal Access Granted ✓ - Hello {toName}, + + Hello {toName}, + - Your NDA has been signed and your access to {organizationName}'s - policy documentation is now active. + Your NDA has been signed and your access to{' '} + {organizationName}'s policy documentation is now + active. @@ -80,8 +88,8 @@ export const AccessGrantedEmail = ({ toName, organizationName, expiresAt, portal )} - You can download your signed NDA for your records from the confirmation page or by - accessing the portal above. + You can download your signed NDA for your records from the + confirmation page or by accessing the portal above.
Lost your access link?
-                  Visit the trust portal and click "Already have access?" to receive a new access link
-                  via email.
+                  Visit the trust portal and click "Already have access?" to
+                  receive a new access link via email.
diff --git a/packages/email/emails/access-reclaim.tsx b/apps/api/src/email/templates/access-reclaim.tsx similarity index 93% rename from packages/email/emails/access-reclaim.tsx rename to apps/api/src/email/templates/access-reclaim.tsx index f4454ca06..4a42d7bda 100644 --- a/packages/email/emails/access-reclaim.tsx +++ b/apps/api/src/email/templates/access-reclaim.tsx @@ -21,7 +21,12 @@ interface Props { expiresAt: Date; } -export const AccessReclaimEmail = ({ toName, organizationName, accessLink, expiresAt }: Props) => { +export const AccessReclaimEmail = ({ + toName, + organizationName, + accessLink, + expiresAt, +}: Props) => { return ( @@ -51,11 +56,13 @@ export const AccessReclaimEmail = ({ toName, organizationName, accessLink, expir Access Your Data - Hello {toName}, + + Hello {toName}, + - You requested access to {organizationName}'s compliance - documentation. + You requested access to {organizationName}'s + compliance documentation. diff --git a/packages/email/emails/nda-signing.tsx b/apps/api/src/email/templates/nda-signing.tsx similarity index 89% rename from packages/email/emails/nda-signing.tsx rename to apps/api/src/email/templates/nda-signing.tsx index deb4cfcbb..74f4a5a11 100644 --- a/packages/email/emails/nda-signing.tsx +++ b/apps/api/src/email/templates/nda-signing.tsx @@ -20,7 +20,11 @@ interface Props { ndaSigningLink: string; } -export const NdaSigningEmail = ({ toName, organizationName, ndaSigningLink }: Props) => { +export const NdaSigningEmail = ({ + toName, + organizationName, + ndaSigningLink, +}: Props) => { return ( @@ -50,16 +54,19 @@ export const NdaSigningEmail = ({ toName, organizationName, ndaSigningLink }: Pr NDA Signature Required - Hello {toName}, + + Hello {toName}, + - Your request to {organizationName}'s trust portal has been approved. + Your request to {organizationName}'s trust portal + has been approved. - Before you can access the policy documentation, you must review and sign a - Non-Disclosure Agreement (NDA). + Before you can access the policy documentation, you must review + and sign a Non-Disclosure Agreement (NDA). @@ -82,8 +89,8 @@ export const NdaSigningEmail = ({ toName, organizationName, ndaSigningLink }: Pr
-                  This link will expire in 7 days. If you need a new link, please contact the
-                  organization.
+                  This link will expire in 7 days. If you need a new link, please
+                  contact the organization.
diff --git a/apps/api/src/trust-portal/email.service.ts b/apps/api/src/trust-portal/email.service.ts index f38a96401..fa00c50a7 100644 --- a/apps/api/src/trust-portal/email.service.ts +++ b/apps/api/src/trust-portal/email.service.ts @@ -1,10 +1,8 @@ import { Injectable, Logger } from '@nestjs/common'; -import { - sendEmail, - NdaSigningEmail, - AccessGrantedEmail, - AccessReclaimEmail, -} from '@trycompai/email'; +import { sendEmail } from '../email/resend'; +import { AccessGrantedEmail } from '../email/templates/access-granted'; +import { AccessReclaimEmail } from '../email/templates/access-reclaim'; +import { NdaSigningEmail } from '../email/templates/nda-signing'; @Injectable() export class TrustEmailService { diff --git a/apps/api/tsconfig.json b/apps/api/tsconfig.json index 7e903ec25..f65b04f38 100644 --- a/apps/api/tsconfig.json +++ b/apps/api/tsconfig.json @@ -24,6 +24,7 @@ "paths": { "@/*": ["./src/*"], "@db": ["./prisma/index"] - } + }, + "jsx": "react-jsx" } } diff --git a/apps/app/src/app/(app)/[orgId]/components/OnboardingTracker.tsx b/apps/app/src/app/(app)/[orgId]/components/OnboardingTracker.tsx index 59e688566..835dcf5f2 100644 --- a/apps/app/src/app/(app)/[orgId]/components/OnboardingTracker.tsx +++ b/apps/app/src/app/(app)/[orgId]/components/OnboardingTracker.tsx @@ -1,5 +1,6 @@ 'use client'; +import { Button } from '@comp/ui/button'; import { Card, CardContent } from '@comp/ui/card'; import type { Onboarding } from '@db'; import { useRealtimeRun } from '@trigger.dev/react-hooks'; @@ -11,6 +12,7 @@ import { ChevronUp, ChevronsDown, ChevronsUp, + Clock3, Loader2, Rocket, Settings, @@ -19,8 +21,8 @@ import { Zap, } from 'lucide-react'; import Link from 'next/link'; -import { usePathname } from 'next/navigation'; -import { useEffect, useMemo, useState } from 'react'; +import { usePathname, useRouter } from 'next/navigation'; +import { useCallback, useEffect, useMemo, useState } from 'react'; import { createPortal } from 'react-dom'; const ONBOARDING_STEPS = [ @@ -48,7 +50,9 @@ const getFriendlyStatusName = (status: string): string => { export const OnboardingTracker = ({ onboarding }: { onboarding: Onboarding }) => { const triggerJobId = onboarding.triggerJobId; + const organizationId = onboarding.organizationId; const pathname = usePathname(); + const router = useRouter(); const orgId = pathname?.split('/')[1] || ''; const [mounted, setMounted] = useState(false); const [isMinimized, setIsMinimized] = useState(false); @@ -63,6 +67,13 @@ export const OnboardingTracker = ({ onboarding }: { onboarding: Onboarding }) => enabled: !!triggerJobId, }); + const handleRetry = useCallback(() => { + if (!organizationId) { + return; + } + void router.push(`/onboarding/${organizationId}?retry=1`); + }, [organizationId, router]); + useEffect(() => { setMounted(true); }, []); @@ -103,34 +114,34 @@ export const OnboardingTracker = ({ onboarding }: { onboarding: Onboarding }) => const meta = run.metadata as Record; // Build vendorsStatus object from individual vendor status keys - const vendorsStatus: Record = {}; + const vendorsStatus: Record = {}; const vendorsInfo = (meta.vendorsInfo as Array<{ id: string; name: string }>) || []; vendorsInfo.forEach((vendor) => { const statusKey = `vendor_${vendor.id}_status`; vendorsStatus[vendor.id] = - (meta[statusKey] as 'pending' | 'processing' | 'completed') || 'pending'; + (meta[statusKey] as 'pending' | 'processing' | 'assessing' | 'completed') || 'pending'; }); // Build risksStatus object from individual risk status keys - const risksStatus: 
Record = {}; + const risksStatus: Record = {}; const risksInfo = (meta.risksInfo as Array<{ id: string; name: string }>) || []; risksInfo.forEach((risk) => { const statusKey = `risk_${risk.id}_status`; risksStatus[risk.id] = - (meta[statusKey] as 'pending' | 'processing' | 'completed') || 'pending'; + (meta[statusKey] as 'pending' | 'processing' | 'assessing' | 'completed') || 'pending'; }); // Build policiesStatus object from individual policy status keys - const policiesStatus: Record = {}; + const policiesStatus: Record = {}; const policiesInfo = (meta.policiesInfo as Array<{ id: string; name: string }>) || []; policiesInfo.forEach((policy) => { // Check for individual policy status key: policy_{id}_status const statusKey = `policy_${policy.id}_status`; policiesStatus[policy.id] = - (meta[statusKey] as 'pending' | 'processing' | 'completed') || 'pending'; + (meta[statusKey] as 'queued' | 'pending' | 'processing' | 'completed') || 'queued'; }); return { @@ -234,9 +245,9 @@ export const OnboardingTracker = ({ onboarding }: { onboarding: Onboarding }) =>
{isCompleted ? ( - + ) : ( - + )}

@@ -254,7 +265,7 @@ export const OnboardingTracker = ({ onboarding }: { onboarding: Onboarding }) => )}

-
+
{isCompleted && (
{/* Show completed steps */} -
+
{ONBOARDING_STEPS.map((step) => (
- + {step.label}
))} @@ -733,21 +804,32 @@ export const OnboardingTracker = ({ onboarding }: { onboarding: Onboarding }) => const truncatedMessage = errorMessage.length > 60 ? `${errorMessage.substring(0, 57)}...` : errorMessage; return ( -
- -
-

- Setup {friendlyStatus} -

-

{truncatedMessage}

+
+
+ +
+

Setup needs attention

+

+            Something went wrong while tailoring your environment. Retry the onboarding job or
+            contact support for help.

+
+ +
+
+ +
-
); } @@ -756,14 +838,14 @@ export const OnboardingTracker = ({ onboarding }: { onboarding: Onboarding }) => return (
- +

Unknown Status

Status: {exhaustiveCheck}

+ )} + + + {hasActivePolicies && policyProgress && ( +
+
+ +
+
+ Tailoring your policies + + Personalized {policyProgress.completed}/{policyProgress.total} policies + +
+
)} - - )} - - + + + ); diff --git a/apps/app/src/app/(app)/[orgId]/policies/all/components/policy-tailoring-context.tsx b/apps/app/src/app/(app)/[orgId]/policies/all/components/policy-tailoring-context.tsx new file mode 100644 index 000000000..bb061e15c --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/policies/all/components/policy-tailoring-context.tsx @@ -0,0 +1,45 @@ +'use client'; + +import * as React from 'react'; + +export type PolicyTailoringStatus = 'queued' | 'pending' | 'processing' | 'completed'; + +interface PolicyTailoringContextValue { + getStatus: (policyId: string) => PolicyTailoringStatus | undefined; +} + +const PolicyTailoringContext = React.createContext({ + getStatus: () => undefined, +}); + +interface PolicyTailoringProviderProps { + statuses: Record; + children: React.ReactNode; +} + +export function PolicyTailoringProvider({ + statuses, + children, +}: PolicyTailoringProviderProps) { + const getStatus = React.useCallback( + (policyId: string) => statuses[policyId], + [statuses], + ); + + return ( + + {children} + + ); +} + +export function usePolicyTailoringStatus(policyId: string) { + const context = React.useContext(PolicyTailoringContext); + + if (!context) { + throw new Error('usePolicyTailoringStatus must be used within a PolicyTailoringProvider'); + } + + return context.getStatus(policyId); +} + diff --git a/apps/app/src/app/(app)/[orgId]/policies/all/page.tsx b/apps/app/src/app/(app)/[orgId]/policies/all/page.tsx index af42d76d0..9869bb70e 100644 --- a/apps/app/src/app/(app)/[orgId]/policies/all/page.tsx +++ b/apps/app/src/app/(app)/[orgId]/policies/all/page.tsx @@ -1,6 +1,7 @@ import PageWithBreadcrumb from '@/components/pages/PageWithBreadcrumb'; import { getValidFilters } from '@/lib/data-table'; import type { SearchParams } from '@/types'; +import { db } from '@db'; import type { Metadata } from 'next'; import { FullPolicyHeaderActions } from './components/FullPolicyHeaderActions'; import { PoliciesTable } from './components/policies-table'; @@ -8,12 +9,13 @@ import { getPolicies } from './data/queries'; import { searchParamsCache } from './data/validations'; interface PolicyTableProps { + params: Promise<{ orgId: string }>; searchParams: Promise; } -export default async function PoliciesPage({ ...props }: PolicyTableProps) { - const searchParams = await props.searchParams; - const search = searchParamsCache.parse(searchParams); +export default async function PoliciesPage({ params, searchParams }: PolicyTableProps) { + const [{ orgId }, resolvedSearchParams] = await Promise.all([params, searchParams]); + const search = searchParamsCache.parse(resolvedSearchParams); const validFilters = getValidFilters(search.filters); const promises = Promise.all([ @@ -23,12 +25,17 @@ export default async function PoliciesPage({ ...props }: PolicyTableProps) { }), ]); + const onboarding = await db.onboarding.findFirst({ + where: { organizationId: orgId }, + select: { triggerJobId: true }, + }); + return ( } > - + ); } diff --git a/apps/app/src/app/(app)/[orgId]/risk/(overview)/RisksTable.tsx b/apps/app/src/app/(app)/[orgId]/risk/(overview)/RisksTable.tsx index 5ce63cbd5..0e7bebd4c 100644 --- a/apps/app/src/app/(app)/[orgId]/risk/(overview)/RisksTable.tsx +++ b/apps/app/src/app/(app)/[orgId]/risk/(overview)/RisksTable.tsx @@ -4,32 +4,247 @@ import { DataTable } from '@/components/data-table/data-table'; import { DataTableToolbar } from '@/components/data-table/data-table-toolbar'; import { CreateRiskSheet } from '@/components/sheets/create-risk-sheet'; import { 
useDataTable } from '@/hooks/use-data-table'; +import { getFiltersStateParser, getSortingStateParser } from '@/lib/parsers'; import { useSession } from '@/utils/auth-client'; import type { Member, Risk, User } from '@db'; +import { Risk as RiskType } from '@db'; import { ColumnDef } from '@tanstack/react-table'; -import { useQueryState } from 'nuqs'; -import { useMemo } from 'react'; +import { Loader2 } from 'lucide-react'; +import { + parseAsArrayOf, + parseAsInteger, + parseAsString, + parseAsStringEnum, + useQueryState, +} from 'nuqs'; +import { useCallback, useMemo } from 'react'; +import useSWR from 'swr'; +import * as z from 'zod'; +import { getRisksAction } from './actions/get-risks-action'; +import { RiskOnboardingProvider } from './components/risk-onboarding-context'; +import { RisksLoadingAnimation } from './components/risks-loading-animation'; import { columns as getColumns } from './components/table/RiskColumns'; +import type { GetRiskSchema } from './data/validations'; +import { useOnboardingStatus } from './hooks/use-onboarding-status'; -export type RiskRow = Risk & { assignee: User | null }; +export type RiskRow = Risk & { assignee: User | null; isPending?: boolean; isAssessing?: boolean }; + +const ACTIVE_STATUSES: Array<'pending' | 'processing' | 'created' | 'assessing'> = [ + 'pending', + 'processing', + 'created', + 'assessing', +]; export const RisksTable = ({ - risks, + risks: initialRisks, assignees, - pageCount, + pageCount: initialPageCount, + onboardingRunId, + searchParams: initialSearchParams, }: { risks: RiskRow[]; assignees: (Member & { user: User })[]; pageCount: number; + onboardingRunId?: string | null; + searchParams: GetRiskSchema; }) => { const session = useSession(); const orgId = session?.data?.session?.activeOrganizationId; const [_, setOpenSheet] = useQueryState('create-risk-sheet'); + const { itemStatuses, progress, itemsInfo, isActive, isLoading } = useOnboardingStatus( + onboardingRunId, + 'risks', + ); + + // Read current search params from URL (synced with table state via useDataTable) + const [page] = useQueryState('page', parseAsInteger.withDefault(1)); + const [perPage] = useQueryState('perPage', parseAsInteger.withDefault(50)); + const [title] = useQueryState('title', parseAsString.withDefault('')); + const [sort] = useQueryState( + 'sort', + getSortingStateParser().withDefault([{ id: 'title', desc: true }]), + ); + const [filters] = useQueryState('filters', getFiltersStateParser().withDefault([])); + const [joinOperator] = useQueryState( + 'joinOperator', + parseAsStringEnum(['and', 'or']).withDefault('and'), + ); + const [lastUpdated] = useQueryState( + 'lastUpdated', + parseAsArrayOf(z.coerce.date()).withDefault([]), + ); + + // Build current search params from URL state + const currentSearchParams = useMemo(() => { + return { + page, + perPage, + title, + sort, + filters, + joinOperator, + lastUpdated, + }; + }, [page, perPage, title, sort, filters, joinOperator, lastUpdated]); + + // Create stable SWR key from current search params + const swrKey = useMemo(() => { + if (!orgId) return null; + // Serialize search params to create a stable key + const key = JSON.stringify(currentSearchParams); + return ['risks', orgId, key] as const; + }, [orgId, currentSearchParams]); + + // Fetcher function for SWR + const fetcher = useCallback(async () => { + return await getRisksAction(currentSearchParams); + }, [currentSearchParams]); + + // Use SWR to fetch risks with polling when onboarding is active + const { data: risksData } = useSWR(swrKey, 
fetcher, { + fallbackData: { data: initialRisks, pageCount: initialPageCount }, + refreshInterval: isActive ? 1000 : 0, // Poll every 1 second when onboarding is active + revalidateOnFocus: false, + revalidateOnReconnect: true, + keepPreviousData: true, + }); + + const risks = risksData?.data || initialRisks; + const pageCount = risksData?.pageCount ?? initialPageCount; + + // Check if all risks are done assessing (either completed in metadata or closed in DB) + // Also check if there are any pending/processing risks in metadata that haven't been created yet + const allRisksDoneAssessing = useMemo(() => { + // If no risks exist yet, we're not done + if (risks.length === 0) { + // But check if there are risks in metadata that should exist + if (itemsInfo.length > 0) return false; + return false; + } + + // Check if we're still creating risks by comparing DB count with expected total + // If progress.total exists and risks.length < progress.total, we're still creating + if (progress && risks.length < progress.total) { + return false; + } + + // If there are pending/processing risks in metadata that aren't in DB yet, we're not done + const hasPendingRisks = itemsInfo.some((item) => { + const status = itemStatuses[item.id]; + return ( + (status === 'pending' || + status === 'processing' || + status === 'created' || + status === 'assessing') && + !risks.some((r) => r.id === item.id) + ); + }); + + if (hasPendingRisks) return false; + + // Check if all risks in DB are either: + // 1. Completed in metadata (status === 'completed') + // 2. Closed in database (status === 'closed') + const allDbRisksDone = risks.every((risk) => { + const metadataStatus = itemStatuses[risk.id]; + return metadataStatus === 'completed' || risk.status === 'closed'; + }); + + // Also check if there are any risks in metadata that are still assessing + const hasAssessingRisks = Object.values(itemStatuses).some( + (status) => status === 'assessing' || status === 'processing', + ); + + return allDbRisksDone && !hasAssessingRisks; + }, [risks, itemStatuses, itemsInfo, progress]); + + // Merge DB risks with metadata risks (pending ones) + const mergedRisks = useMemo(() => { + const dbRiskIds = new Set(risks.map((r) => r.id)); + + // Mark risks in DB as "assessing" if they're open and onboarding is active + // Don't mark as assessing if risk is already closed (resolved) + const risksWithStatus = risks.map((risk) => { + const metadataStatus = itemStatuses[risk.id]; + // If risk exists in DB but status is open and onboarding is active, it's being assessed + // Only mark as assessing if status is open (not closed) + if (risk.status === 'open' && isActive && onboardingRunId && !metadataStatus) { + return { ...risk, isAssessing: true }; + } + return risk; + }); + + const pendingRisks: RiskRow[] = itemsInfo + .filter((item) => { + // Only show items that are pending/processing and not yet in DB + const status = itemStatuses[item.id]; + return ( + (status === 'pending' || status === 'processing') && + !dbRiskIds.has(item.id) && + !item.id.startsWith('temp_') + ); + }) + .map((item) => { + // Create a placeholder risk row for pending items + const status = itemStatuses[item.id]; + return { + id: item.id, + title: item.name, + description: 'Being researched and created by AI...', + category: 'other' as const, + department: null, + status: 'open' as const, + likelihood: 'very_unlikely' as const, + impact: 'insignificant' as const, + residualLikelihood: 'very_unlikely' as const, + residualImpact: 'insignificant' as const, + 
treatmentStrategy: 'accept' as const, + treatmentStrategyDescription: null, + organizationId: orgId || '', + assigneeId: null, + assignee: null, + createdAt: new Date(), + updatedAt: new Date(), + isPending: true, + } as RiskRow; + }); + + // Also handle temp IDs (risks being created) + const tempRisks: RiskRow[] = itemsInfo + .filter((item) => item.id.startsWith('temp_')) + .map((item) => { + const status = itemStatuses[item.id]; + return { + id: item.id, + title: item.name, + description: 'Being researched and created by AI...', + category: 'other' as const, + department: null, + status: 'open' as const, + likelihood: 'very_unlikely' as const, + impact: 'insignificant' as const, + residualLikelihood: 'very_unlikely' as const, + residualImpact: 'insignificant' as const, + treatmentStrategy: 'accept' as const, + treatmentStrategyDescription: null, + organizationId: orgId || '', + assigneeId: null, + assignee: null, + createdAt: new Date(), + updatedAt: new Date(), + isPending: true, + } as RiskRow; + }); + + return [...risksWithStatus, ...pendingRisks, ...tempRisks]; + }, [risks, itemsInfo, itemStatuses, orgId, isActive, onboardingRunId]); + const columns = useMemo[]>(() => getColumns(orgId ?? ''), [orgId]); const { table } = useDataTable({ - data: risks, + data: mergedRisks, columns, pageCount, getRowId: (row) => row.id, @@ -45,11 +260,121 @@ export const RisksTable = ({ clearOnDefault: true, }); + const getRowProps = useMemo( + () => (risk: RiskRow) => { + const status = itemStatuses[risk.id] || (risk.isPending ? 'pending' : undefined); + const isAssessing = risk.isAssessing || status === 'assessing'; + const isBlocked = + (status && + ACTIVE_STATUSES.includes(status as 'pending' | 'processing' | 'created' | 'assessing')) || + isAssessing; + + if (!isBlocked) { + return {}; + } + + return { + disabled: true, + className: + 'relative bg-muted/40 opacity-70 pointer-events-none after:absolute after:inset-0 after:bg-background/40 after:content-[""] after:animate-pulse', + }; + }, + [itemStatuses], + ); + + // Calculate actual assessment progress + const assessmentProgress = useMemo(() => { + if (!progress || !itemsInfo.length) { + return null; + } + + // Count risks that are completed (either 'completed' in metadata or 'closed' in DB) + const completedCount = risks.filter((risk) => { + const metadataStatus = itemStatuses[risk.id]; + return metadataStatus === 'completed' || risk.status === 'closed'; + }).length; + + // Also count risks in metadata that are completed but not yet in DB + const completedInMetadata = Object.values(itemStatuses).filter( + (status) => status === 'completed', + ).length; + + // Total is the max of progress.total, itemsInfo.length, or actual risks created + const total = Math.max(progress.total, itemsInfo.length, risks.length); + + // Completed is the max of DB closed risks or metadata completed + const completed = Math.max(completedCount, completedInMetadata); + + return { total, completed }; + }, [progress, itemsInfo, risks, itemStatuses]); + + const isEmpty = mergedRisks.length === 0; + // Show empty state if onboarding is active (even if progress metadata isn't set yet) + const showEmptyState = isEmpty && onboardingRunId && isActive; + + // Prevent flicker: if we're loading onboarding status and have a runId, render null + // Once we know the status, show animation if empty and active, otherwise show table + if (onboardingRunId && isLoading) { + return null; + } + + // Show loading animation instead of table when empty and onboarding is active + if 
(showEmptyState) { + return ( + <> + + + + ); + } + return ( <> - row.id} rowClickBasePath={`/${orgId}/risk`}> - - + + row.id} + rowClickBasePath={`/${orgId}/risk`} + getRowProps={getRowProps} + > + <> + + {isActive && !allRisksDoneAssessing && ( +
+
+ +
+
+ + {assessmentProgress + ? assessmentProgress.completed === 0 + ? 'Researching and creating risks' + : assessmentProgress.completed < assessmentProgress.total + ? 'Assessing risks and generating mitigation plans' + : 'Assessing risks and generating mitigation plans' + : progress + ? progress.completed === 0 + ? 'Researching and creating risks' + : 'Assessing risks and generating mitigation plans' + : 'Researching and creating risks'} + + + {assessmentProgress + ? assessmentProgress.completed === 0 + ? 'AI is analyzing your organization...' + : `${assessmentProgress.completed}/${assessmentProgress.total} risks assessed` + : progress + ? progress.completed === 0 + ? 'AI is analyzing your organization...' + : `${progress.completed}/${progress.total} risks created` + : 'AI is analyzing your organization...'} + +
+
+ )} + +
+
); diff --git a/apps/app/src/app/(app)/[orgId]/risk/(overview)/actions/get-risks-action.ts b/apps/app/src/app/(app)/[orgId]/risk/(overview)/actions/get-risks-action.ts new file mode 100644 index 000000000..f96123dd8 --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/risk/(overview)/actions/get-risks-action.ts @@ -0,0 +1,8 @@ +'use server'; + +import { getRisks } from '../data/getRisks'; +import type { GetRiskSchema } from '../data/validations'; + +export async function getRisksAction(input: GetRiskSchema) { + return await getRisks(input); +} diff --git a/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/risk-onboarding-context.tsx b/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/risk-onboarding-context.tsx new file mode 100644 index 000000000..ec71467ac --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/risk-onboarding-context.tsx @@ -0,0 +1,45 @@ +'use client'; + +import * as React from 'react'; + +export type RiskOnboardingStatus = 'pending' | 'processing' | 'created' | 'assessing' | 'completed'; + +interface RiskOnboardingContextValue { + getStatus: (riskId: string) => RiskOnboardingStatus | undefined; +} + +const RiskOnboardingContext = React.createContext({ + getStatus: () => undefined, +}); + +interface RiskOnboardingProviderProps { + statuses: Record; + children: React.ReactNode; +} + +export function RiskOnboardingProvider({ + statuses, + children, +}: RiskOnboardingProviderProps) { + const getStatus = React.useCallback( + (riskId: string) => statuses[riskId], + [statuses], + ); + + return ( + + {children} + + ); +} + +export function useRiskOnboardingStatus(riskId: string) { + const context = React.useContext(RiskOnboardingContext); + + if (!context) { + throw new Error('useRiskOnboardingStatus must be used within a RiskOnboardingProvider'); + } + + return context.getStatus(riskId); +} + diff --git a/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/risks-loading-animation.tsx b/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/risks-loading-animation.tsx new file mode 100644 index 000000000..6fa9af838 --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/risks-loading-animation.tsx @@ -0,0 +1,13 @@ +'use client'; + +import { OnboardingLoadingAnimation } from '@/components/onboarding-loading-animation'; + +export function RisksLoadingAnimation() { + return ( + + ); +} diff --git a/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/table/RiskColumns.tsx b/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/table/RiskColumns.tsx index e6c07e9bf..fd95e8a50 100644 --- a/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/table/RiskColumns.tsx +++ b/apps/app/src/app/(app)/[orgId]/risk/(overview)/components/table/RiskColumns.tsx @@ -3,9 +3,10 @@ import { StatusIndicator } from '@/components/status-indicator'; import { Avatar, AvatarFallback, AvatarImage } from '@comp/ui/avatar'; import { Badge } from '@comp/ui/badge'; import type { ColumnDef } from '@tanstack/react-table'; -import { UserIcon } from 'lucide-react'; +import { Loader2, UserIcon } from 'lucide-react'; import Link from 'next/link'; import { RiskRow } from '../../RisksTable'; +import { useRiskOnboardingStatus } from '../risk-onboarding-context'; export const columns = (orgId: string): ColumnDef[] => [ { @@ -13,11 +14,7 @@ export const columns = (orgId: string): ColumnDef[] => [ accessorKey: 'title', header: ({ column }) => , cell: ({ row }) => { - return ( - - {row.original.title} - - ); + return ; }, meta: { label: 
'Risk', @@ -35,7 +32,7 @@ export const columns = (orgId: string): ColumnDef[] => [ accessorKey: 'status', header: ({ column }) => , cell: ({ row }) => { - return ; + return ; }, meta: { label: 'Status', @@ -102,3 +99,65 @@ export const columns = (orgId: string): ColumnDef[] => [ enableColumnFilter: true, }, ]; + +function RiskNameCell({ row, orgId }: { row: { original: RiskRow }; orgId: string }) { + const risk = row.original; + const status = useRiskOnboardingStatus(risk.id); + const isPending = risk.isPending; + // Don't show active status if risk is already closed (mitigated) + const isResolved = risk.status === 'closed'; + const isActive = + !isResolved && + (status === 'pending' || + status === 'processing' || + status === 'created' || + status === 'assessing'); + + if (isPending || isActive) { + return ( +
+ + {risk.title} +
+ ); + } + + return ( + + {risk.title} + + ); +} + +function RiskStatusCell({ row }: { row: { original: RiskRow } }) { + const risk = row.original; + const status = useRiskOnboardingStatus(risk.id); + const isPending = risk.isPending; + const isResolved = risk.status === 'closed'; + // Don't show assessing if risk is already resolved + const isAssessing = !isResolved && (risk.isAssessing || status === 'assessing'); + const isActive = + !isResolved && + (status === 'pending' || + status === 'processing' || + status === 'created' || + status === 'assessing'); + + if (isPending || isActive) { + const statusText = + status === 'pending' || status === 'processing' || isPending + ? 'Creating...' + : status === 'assessing' || isAssessing + ? 'Assessing...' + : 'Processing...'; + + return ( +
+ + {statusText} +
+ ); + } + + return ; +} diff --git a/apps/app/src/app/(app)/[orgId]/risk/(overview)/hooks/use-onboarding-status.ts b/apps/app/src/app/(app)/[orgId]/risk/(overview)/hooks/use-onboarding-status.ts new file mode 100644 index 000000000..5b53b1975 --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/risk/(overview)/hooks/use-onboarding-status.ts @@ -0,0 +1,106 @@ +'use client'; + +import { useRealtimeRun } from '@trigger.dev/react-hooks'; +import { useMemo } from 'react'; + +export type OnboardingItemStatus = 'pending' | 'processing' | 'created' | 'assessing' | 'completed'; + +export interface OnboardingItemInfo { + id: string; + name: string; +} + +export function useOnboardingStatus( + onboardingRunId: string | null | undefined, + itemType: 'risks' | 'vendors', +) { + const shouldSubscribe = Boolean(onboardingRunId); + const { run } = useRealtimeRun(shouldSubscribe ? onboardingRunId! : '', { + enabled: shouldSubscribe, + }); + + const itemStatuses = useMemo>(() => { + if (!run?.metadata) { + return {}; + } + + const meta = run.metadata as Record; + const itemsInfo = (meta[`${itemType}Info`] as Array<{ id: string; name: string }>) || []; + + return itemsInfo.reduce>((acc, item) => { + const statusKey = `${itemType.slice(0, -1)}_${item.id}_status`; + const status = meta[statusKey]; + + if ( + status === 'pending' || + status === 'processing' || + status === 'created' || + status === 'assessing' || + status === 'completed' + ) { + acc[item.id] = status; + } + return acc; + }, {}); + }, [run?.metadata, itemType]); + + const progress = useMemo(() => { + if (!run?.metadata) return null; + + const meta = run.metadata as Record; + const total = + typeof meta[`${itemType}Total`] === 'number' ? (meta[`${itemType}Total`] as number) : 0; + const completed = + typeof meta[`${itemType}Completed`] === 'number' + ? (meta[`${itemType}Completed`] as number) + : 0; + + if (total === 0) { + return null; + } + + return { total, completed }; + }, [run?.metadata, itemType]); + + const itemsInfo = useMemo(() => { + if (!run?.metadata) { + return []; + } + + const meta = run.metadata as Record; + return (meta[`${itemType}Info`] as Array<{ id: string; name: string }>) || []; + }, [run?.metadata, itemType]); + + // Check if any items are still being processed (not completed) + const hasActiveItems = useMemo(() => { + return Object.values(itemStatuses).some( + (status) => status !== 'completed' && status !== undefined, + ); + }, [itemStatuses]); + + // Check if onboarding run is active based on run status + const isRunActive = useMemo(() => { + if (!run) return false; + // Run is active if it's executing, queued, or waiting + const activeStatuses = ['EXECUTING', 'QUEUED', 'WAITING']; + return activeStatuses.includes(run.status); + }, [run]); + + // Check if items are still being processed + const hasActiveProgress = progress !== null && progress.completed < progress.total; + + // Onboarding is active if: + // 1. Run is active (executing/queued), OR + // 2. There's active progress, OR + // 3. 
There are active items being processed + const isActive = isRunActive || hasActiveProgress || hasActiveItems; + + return { + itemStatuses, + progress, + itemsInfo, + isActive, + isLoading: shouldSubscribe && !run, + runStatus: run?.status, + }; +} diff --git a/apps/app/src/app/(app)/[orgId]/risk/(overview)/page.tsx b/apps/app/src/app/(app)/[orgId]/risk/(overview)/page.tsx index fd0f15dec..7273c18c7 100644 --- a/apps/app/src/app/(app)/[orgId]/risk/(overview)/page.tsx +++ b/apps/app/src/app/(app)/[orgId]/risk/(overview)/page.tsx @@ -29,19 +29,26 @@ export default async function RiskRegisterPage(props: { const search = searchParamsCache.parse(searchParams); const validFilters = getValidFilters(search.filters); - const risksResult = await getRisks({ + const searchParamsForTable = { ...search, filters: validFilters, - }); + }; + + const [risksResult, assignees, onboarding] = await Promise.all([ + getRisks(searchParamsForTable), + getAssignees(), + db.onboarding.findFirst({ + where: { organizationId: orgId }, + select: { triggerJobId: true }, + }), + ]); - const assignees = await getAssignees(); + const isEmpty = risksResult.data?.length === 0; + const isDefaultView = search.page === 1 && search.title === '' && validFilters.length === 0; + const isOnboardingActive = Boolean(onboarding?.triggerJobId); - if ( - risksResult.data?.length === 0 && - search.page === 1 && - search.title === '' && - validFilters.length === 0 - ) { + // Show AppOnboarding only if empty, default view, AND onboarding is not active + if (isEmpty && isDefaultView && !isOnboardingActive) { return (
); diff --git a/apps/app/src/app/(app)/[orgId]/risk/[riskId]/actions/regenerate-risk-mitigation.ts b/apps/app/src/app/(app)/[orgId]/risk/[riskId]/actions/regenerate-risk-mitigation.ts index 63dcfcf99..0c5565b14 100644 --- a/apps/app/src/app/(app)/[orgId]/risk/[riskId]/actions/regenerate-risk-mitigation.ts +++ b/apps/app/src/app/(app)/[orgId]/risk/[riskId]/actions/regenerate-risk-mitigation.ts @@ -2,6 +2,11 @@ import { authActionClient } from '@/actions/safe-action'; import { generateRiskMitigation } from '@/jobs/tasks/onboarding/generate-risk-mitigation'; +import { + findCommentAuthor, + type PolicyContext, +} from '@/jobs/tasks/onboarding/onboard-organization-helpers'; +import { db } from '@db'; import { tasks } from '@trigger.dev/sdk'; import { z } from 'zod'; @@ -26,9 +31,30 @@ export const regenerateRiskMitigationAction = authActionClient throw new Error('No active organization'); } + const organizationId = session.activeOrganizationId; + + const [author, policyRows] = await Promise.all([ + findCommentAuthor(organizationId), + db.policy.findMany({ + where: { organizationId }, + select: { name: true, description: true }, + }), + ]); + + if (!author) { + throw new Error('No eligible author found to regenerate the mitigation'); + } + + const policies: PolicyContext[] = policyRows.map((policy) => ({ + name: policy.name, + description: policy.description, + })); + await tasks.trigger('generate-risk-mitigation', { - organizationId: session.activeOrganizationId, + organizationId, riskId, + authorId: author.id, + policies, }); return { success: true }; diff --git a/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/[automationId]/actions/generate-suggestions.ts b/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/[automationId]/actions/generate-suggestions.ts index 69d14425a..178b1f13d 100644 --- a/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/[automationId]/actions/generate-suggestions.ts +++ b/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/automation/[automationId]/actions/generate-suggestions.ts @@ -1,8 +1,8 @@ 'use server'; -import { openai } from '@ai-sdk/openai'; +import { groq } from '@ai-sdk/groq'; import { db } from '@db'; -import { generateObject } from 'ai'; +import { generateObject, NoObjectGeneratedError } from 'ai'; import { performance } from 'perf_hooks'; import { z } from 'zod'; import { @@ -80,21 +80,63 @@ export async function generateAutomationSuggestions( // Generate AI suggestions const aiStartTime = performance.now(); - const { object, usage } = await generateObject({ - model: openai('gpt-4.1-mini'), // Testing gpt-5-nano for suggestions - schema: SuggestionsSchema, - system: AUTOMATION_SUGGESTIONS_SYSTEM_PROMPT, - prompt: getAutomationSuggestionsPrompt(taskDescription, vendorList, contextInfo), - }); - const aiTime = performance.now() - aiStartTime; - console.log( - `[generateAutomationSuggestions] AI generation completed in ${aiTime.toFixed(2)}ms (total tokens: ${usage?.totalTokens || 'unknown'})`, - ); + try { + const { object, usage } = await generateObject({ + model: groq('meta-llama/llama-4-scout-17b-16e-instruct'), + schema: SuggestionsSchema, + system: AUTOMATION_SUGGESTIONS_SYSTEM_PROMPT, + prompt: getAutomationSuggestionsPrompt(taskDescription, vendorList, contextInfo), + }); + const aiTime = performance.now() - aiStartTime; + console.log( + `[generateAutomationSuggestions] AI generation completed in ${aiTime.toFixed(2)}ms (total tokens: ${usage?.totalTokens || 'unknown'})`, + ); - const totalTime = performance.now() - startTime; - 
console.log( - `[generateAutomationSuggestions] Total time: ${totalTime.toFixed(2)}ms (vendors: ${vendorsTime.toFixed(2)}ms, context: ${contextTime.toFixed(2)}ms, AI: ${aiTime.toFixed(2)}ms)`, - ); + const totalTime = performance.now() - startTime; + console.log( + `[generateAutomationSuggestions] Total time: ${totalTime.toFixed(2)}ms (vendors: ${vendorsTime.toFixed(2)}ms, context: ${contextTime.toFixed(2)}ms, AI: ${aiTime.toFixed(2)}ms)`, + ); + + // Handle case where model returns single object instead of array + let suggestions = object.suggestions; + if (!Array.isArray(suggestions)) { + if (suggestions && typeof suggestions === 'object' && 'title' in suggestions) { + suggestions = [suggestions]; + } else { + suggestions = []; + } + } - return object.suggestions; + return suggestions; + } catch (error) { + const aiTime = performance.now() - aiStartTime; + console.error('[generateAutomationSuggestions] Error generating suggestions:', error); + // Try to extract suggestions from error if available + if (NoObjectGeneratedError.isInstance(error)) { + try { + const errorText = error.text; + if (errorText) { + const parsed = JSON.parse(errorText); + if (parsed.suggestions) { + const suggestions = Array.isArray(parsed.suggestions) + ? parsed.suggestions + : [parsed.suggestions]; + if (suggestions.length > 0 && suggestions[0].title) { + console.log( + `[generateAutomationSuggestions] Recovered ${suggestions.length} suggestions from error response`, + ); + return suggestions; + } + } + } + } catch { + // Ignore parse errors + } + } + const totalTime = performance.now() - startTime; + console.log( + `[generateAutomationSuggestions] Total time: ${totalTime.toFixed(2)}ms (vendors: ${vendorsTime.toFixed(2)}ms, context: ${contextTime.toFixed(2)}ms, AI: ${aiTime.toFixed(2)}ms) - FAILED`, + ); + return []; + } } diff --git a/apps/app/src/app/(app)/[orgId]/trust/components/grants-tab.tsx b/apps/app/src/app/(app)/[orgId]/trust/components/grants-tab.tsx index a79f86dbb..0e2384124 100644 --- a/apps/app/src/app/(app)/[orgId]/trust/components/grants-tab.tsx +++ b/apps/app/src/app/(app)/[orgId]/trust/components/grants-tab.tsx @@ -1,6 +1,7 @@ import { useAccessGrants } from '@/hooks/use-access-requests'; import { Badge } from '@comp/ui/badge'; import { Button } from '@comp/ui/button'; +import { Skeleton } from '@comp/ui/skeleton'; import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from '@comp/ui/table'; import { useState } from 'react'; import { RevokeDialog } from './revoke-dialog'; @@ -9,14 +10,6 @@ export function GrantsTab({ orgId }: { orgId: string }) { const { data, isLoading } = useAccessGrants(orgId); const [revokeId, setRevokeId] = useState(null); - if (isLoading) { - return
Loading grants...
; - } - - if (!data || data.length === 0) { - return
No access grants yet
; - } - return (
@@ -30,35 +23,70 @@ export function GrantsTab({ orgId }: { orgId: string }) { - {data.map((grant) => ( - - {grant.subjectEmail} - - - {grant.status} - - - {new Date(grant.expiresAt).toLocaleDateString()} - - {grant.revokedAt ? new Date(grant.revokedAt).toLocaleDateString() : '-'} - - - {grant.status === 'active' && ( - - )} - - - ))} + {isLoading + ? Array.from({ length: 5 }).map((_, index) => ( + + + + + + + + + + + + + + + + + + )) + : data && data.length > 0 + ? data.map((grant) => ( + + {grant.subjectEmail} + + + {grant.status} + + + {new Date(grant.expiresAt).toLocaleDateString()} + + {grant.revokedAt ? new Date(grant.revokedAt).toLocaleDateString() : '-'} + + + {grant.status === 'active' && ( + + )} + + + )) + : ( + + + No access grants yet + + + )}
{revokeId && ( diff --git a/apps/app/src/app/(app)/[orgId]/trust/components/request-tab.tsx b/apps/app/src/app/(app)/[orgId]/trust/components/request-tab.tsx index 10d5630d9..7ab59c057 100644 --- a/apps/app/src/app/(app)/[orgId]/trust/components/request-tab.tsx +++ b/apps/app/src/app/(app)/[orgId]/trust/components/request-tab.tsx @@ -1,6 +1,7 @@ import { useAccessRequests, usePreviewNda, useResendNda } from '@/hooks/use-access-requests'; import { Badge } from '@comp/ui/badge'; import { Button } from '@comp/ui/button'; +import { Skeleton } from '@comp/ui/skeleton'; import { Table, TableBody, TableCell, TableHead, TableHeader, TableRow } from '@comp/ui/table'; import { useState } from 'react'; import { toast } from 'sonner'; @@ -36,14 +37,6 @@ export function RequestsTab({ orgId }: { orgId: string }) { ); }; - if (isLoading) { - return
Loading requests...
; - } - - if (!data || data.length === 0) { - return
No access requests yet
; - } - return (
@@ -61,72 +54,116 @@ export function RequestsTab({ orgId }: { orgId: string }) { - {data.map((request) => { - const ndaPending = request.status === 'approved' && !request.grant; - return ( - - {new Date(request.createdAt).toLocaleDateString()} - {request.name} - {request.email} - {request.company || '-'} - {request.purpose || '-'} - {request.requestedDurationDays ?? 30}d - - - {request.status} - + {isLoading ? ( + Array.from({ length: 5 }).map((_, index) => ( + + + - - {ndaPending ? ( - pending - ) : request.grant ? ( - signed - ) : ( - '-' - )} + + - -
- - - {ndaPending && ( + {request.status} + + + + {ndaPending ? ( + pending + ) : request.grant ? ( + signed + ) : ( + '-' + )} + + +
- )} - -
-
- - ); - })} + + {ndaPending && ( + + )} + +
+
+
+ ); + }) + ) : ( + + + No access requests yet + + + )}
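The skeleton-loading markup in the two trust tabs was flattened in this view of the patch; as a reading aid only, here is a minimal sketch of the loading/data/empty ternary that GrantsTab and RequestsTab now render inside their table body. It assumes the @comp/ui Table, Skeleton, and Badge components from the imports above; the column count, class names, and the GrantRowData shape are illustrative, not the exact markup from the diff.

import { Badge } from '@comp/ui/badge';
import { Skeleton } from '@comp/ui/skeleton';
import { Table, TableBody, TableCell, TableRow } from '@comp/ui/table';

// Illustrative row shape; the real hooks return richer access-grant/request records.
type GrantRowData = { id: string; subjectEmail: string; status: string; expiresAt: string };

export function GrantsTableBodySketch({
  isLoading,
  data,
}: {
  isLoading: boolean;
  data?: GrantRowData[];
}) {
  return (
    <Table>
      <TableBody>
        {isLoading ? (
          // While loading, render a fixed number of skeleton rows instead of returning early.
          Array.from({ length: 5 }).map((_, index) => (
            <TableRow key={`skeleton-${index}`}>
              <TableCell><Skeleton className="h-4 w-40" /></TableCell>
              <TableCell><Skeleton className="h-4 w-16" /></TableCell>
              <TableCell><Skeleton className="h-4 w-24" /></TableCell>
            </TableRow>
          ))
        ) : data && data.length > 0 ? (
          // With data, render one row per grant.
          data.map((grant) => (
            <TableRow key={grant.id}>
              <TableCell>{grant.subjectEmail}</TableCell>
              <TableCell><Badge>{grant.status}</Badge></TableCell>
              <TableCell>{new Date(grant.expiresAt).toLocaleDateString()}</TableCell>
            </TableRow>
          ))
        ) : (
          // Otherwise keep the table chrome and show the empty state as a single row.
          <TableRow>
            <TableCell colSpan={3}>No access grants yet</TableCell>
          </TableRow>
        )}
      </TableBody>
    </Table>
  );
}

Keeping the table mounted through the loading and empty states avoids the layout shift the old early returns caused when data arrived.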
{approveId && ( diff --git a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/actions/get-vendors-action.ts b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/actions/get-vendors-action.ts new file mode 100644 index 000000000..7fd49867f --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/actions/get-vendors-action.ts @@ -0,0 +1,19 @@ +'use server'; + +import { auth } from '@/utils/auth'; +import { headers } from 'next/headers'; +import { getVendors } from '../data/queries'; +import type { GetVendorsSchema } from '../data/validations'; + +export async function getVendorsAction(input: GetVendorsSchema) { + const session = await auth.api.getSession({ + headers: await headers(), + }); + + if (!session?.session.activeOrganizationId) { + return { data: [], pageCount: 0 }; + } + + return await getVendors(session.session.activeOrganizationId, input); +} + diff --git a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorColumns.tsx b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorColumns.tsx index 8e81d6ff4..09c48b390 100644 --- a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorColumns.tsx +++ b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorColumns.tsx @@ -2,15 +2,60 @@ import { DataTableColumnHeader } from '@/components/data-table/data-table-column import { VendorStatus } from '@/components/vendor-status'; import { Avatar, AvatarFallback, AvatarImage } from '@comp/ui/avatar'; import { Badge } from '@comp/ui/badge'; -import type { ColumnDef } from '@tanstack/react-table'; -import { UserIcon } from 'lucide-react'; +import type { ColumnDef, Row } from '@tanstack/react-table'; +import { Loader2, UserIcon } from 'lucide-react'; import Link from 'next/link'; -import type { GetVendorsResult } from '../data/queries'; +import { useVendorOnboardingStatus } from './vendor-onboarding-context'; import { VendorDeleteCell } from './VendorDeleteCell'; +import type { VendorRow } from './VendorsTable'; -type VendorRow = GetVendorsResult['data'][number]; +function VendorNameCell({ row, orgId }: { row: Row; orgId: string }) { + const vendorId = row.original.id; + const onboardingStatus = useVendorOnboardingStatus(); + const status = onboardingStatus[vendorId]; + const isPending = row.original.isPending || status === 'pending' || status === 'processing'; + const isAssessing = row.original.isAssessing || status === 'assessing'; + const isResolved = row.original.status === 'assessed'; -export const columns: ColumnDef[] = [ + if ((isPending || isAssessing) && !isResolved) { + return ( +
+ + {row.original.name} +
+ ); + } + return {row.original.name}; +} + +function VendorStatusCell({ row }: { row: Row }) { + const vendorId = row.original.id; + const onboardingStatus = useVendorOnboardingStatus(); + const status = onboardingStatus[vendorId]; + const isPending = row.original.isPending || status === 'pending' || status === 'processing'; + const isAssessing = row.original.isAssessing || status === 'assessing'; + const isResolved = row.original.status === 'assessed'; + + if (isPending && !isResolved) { + return ( +
+ + Creating... +
+ ); + } + if (isAssessing && !isResolved) { + return ( +
+ + Assessing... +
+ ); + } + return ; +} + +export const columns = (orgId: string): ColumnDef[] => [ { id: 'name', accessorKey: 'name', @@ -18,11 +63,7 @@ export const columns: ColumnDef[] = [ return ; }, cell: ({ row }) => { - return ( - - {row.original.name} - - ); + return ; }, meta: { label: 'Vendor Name', @@ -41,7 +82,7 @@ export const columns: ColumnDef[] = [ return ; }, cell: ({ row }) => { - return ; + return ; }, meta: { label: 'Status', diff --git a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorsTable.tsx b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorsTable.tsx index 0da8d6548..7cd700543 100644 --- a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorsTable.tsx +++ b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/VendorsTable.tsx @@ -2,47 +2,382 @@ import { DataTable } from '@/components/data-table/data-table'; import { DataTableToolbar } from '@/components/data-table/data-table-toolbar'; +import { OnboardingLoadingAnimation } from '@/components/onboarding-loading-animation'; import { useDataTable } from '@/hooks/use-data-table'; -import { useParams } from 'next/navigation'; -import * as React from 'react'; +import { getFiltersStateParser, getSortingStateParser } from '@/lib/parsers'; +import { useSession } from '@/utils/auth-client'; +import { Departments, Vendor } from '@db'; +import { ColumnDef } from '@tanstack/react-table'; +import { Loader2 } from 'lucide-react'; +import { parseAsInteger, parseAsString, parseAsStringEnum, useQueryState } from 'nuqs'; +import { useCallback, useMemo } from 'react'; +import useSWR from 'swr'; import { CreateVendorSheet } from '../../components/create-vendor-sheet'; +import { getVendorsAction } from '../actions/get-vendors-action'; import type { GetAssigneesResult, GetVendorsResult } from '../data/queries'; -import { columns } from './VendorColumns'; +import type { GetVendorsSchema } from '../data/validations'; +import { useOnboardingStatus } from '../hooks/use-onboarding-status'; +import { VendorOnboardingProvider } from './vendor-onboarding-context'; +import { columns as getColumns } from './VendorColumns'; + +export type VendorRow = GetVendorsResult['data'][number] & { + isPending?: boolean; + isAssessing?: boolean; +}; + +const ACTIVE_STATUSES: Array<'pending' | 'processing' | 'created' | 'assessing'> = [ + 'pending', + 'processing', + 'created', + 'assessing', +]; interface VendorsTableProps { - promises: Promise<[GetVendorsResult, GetAssigneesResult]>; + vendors: GetVendorsResult['data']; + pageCount: number; + assignees: GetAssigneesResult; + onboardingRunId?: string | null; + searchParams: GetVendorsSchema; } -export function VendorsTable({ promises }: VendorsTableProps) { - const { orgId } = useParams(); +export function VendorsTable({ + vendors: initialVendors, + pageCount: initialPageCount, + assignees, + onboardingRunId, + searchParams: initialSearchParams, +}: VendorsTableProps) { + const session = useSession(); + const orgId = session?.data?.session?.activeOrganizationId; + + const { itemStatuses, progress, itemsInfo, isActive, isLoading } = useOnboardingStatus( + onboardingRunId, + 'vendors', + ); + + // Read current search params from URL (synced with table state via useDataTable) + const [page] = useQueryState('page', parseAsInteger.withDefault(1)); + const [perPage] = useQueryState('perPage', parseAsInteger.withDefault(50)); + const [name] = useQueryState('name', parseAsString.withDefault('')); + const [status] = useQueryState( + 'status', + 
parseAsStringEnum(['not_assessed', 'assessed'] as const), + ); + const [department] = useQueryState( + 'department', + parseAsStringEnum(Object.values(Departments)), + ); + const [assigneeId] = useQueryState('assigneeId', parseAsString); + const [sort] = useQueryState( + 'sort', + getSortingStateParser().withDefault([{ id: 'name', desc: false }]), + ); + const [filters] = useQueryState('filters', getFiltersStateParser().withDefault([])); + const [joinOperator] = useQueryState( + 'joinOperator', + parseAsStringEnum(['and', 'or']).withDefault('and'), + ); + + // Build current search params from URL state + const currentSearchParams = useMemo(() => { + return { + page, + perPage, + name, + status: status ?? null, + department: department ?? null, + assigneeId: assigneeId ?? null, + sort, + filters, + joinOperator, + }; + }, [page, perPage, name, status, department, assigneeId, sort, filters, joinOperator]); + + // Create stable SWR key from current search params + const swrKey = useMemo(() => { + if (!orgId) return null; + // Serialize search params to create a stable key + const key = JSON.stringify(currentSearchParams); + return ['vendors', orgId, key] as const; + }, [orgId, currentSearchParams]); + + // Fetcher function for SWR + const fetcher = useCallback(async () => { + return await getVendorsAction(currentSearchParams); + }, [currentSearchParams]); + + // Use SWR to fetch vendors with polling when onboarding is active + const { data: vendorsData } = useSWR(swrKey, fetcher, { + fallbackData: { data: initialVendors, pageCount: initialPageCount }, + refreshInterval: isActive ? 1000 : 0, // Poll every 1 second when onboarding is active + revalidateOnFocus: false, + revalidateOnReconnect: true, + keepPreviousData: true, + }); + + const vendors = vendorsData?.data || initialVendors; + const pageCount = vendorsData?.pageCount ?? initialPageCount; + + // Check if all vendors are done assessing + const allVendorsDoneAssessing = useMemo(() => { + // If no vendors exist yet, we're not done + if (vendors.length === 0) { + // But check if there are vendors in metadata that should exist + if (itemsInfo.length > 0) return false; + return false; + } + + // Check if we're still creating vendors by comparing DB count with expected total + // If progress.total exists and vendors.length < progress.total, we're still creating + if (progress && vendors.length < progress.total) { + return false; + } + + // If there are pending/processing vendors in metadata that aren't in DB yet, we're not done + const hasPendingVendors = itemsInfo.some((item) => { + const status = itemStatuses[item.id]; + return ( + (status === 'pending' || + status === 'processing' || + status === 'created' || + status === 'assessing') && + !vendors.some((v) => v.id === item.id) + ); + }); + + if (hasPendingVendors) return false; + + // Check if all vendors in DB are either: + // 1. Completed in metadata (status === 'completed') + // 2. 
Assessed in database (status === 'assessed') + const allDbVendorsDone = vendors.every((vendor) => { + const metadataStatus = itemStatuses[vendor.id]; + return metadataStatus === 'completed' || vendor.status === 'assessed'; + }); + + // Also check if there are any vendors in metadata that are still assessing + const hasAssessingVendors = Object.values(itemStatuses).some( + (status) => status === 'assessing' || status === 'processing', + ); - // Resolve the promise data here - const [{ data: vendors, pageCount }, assignees] = React.use(promises); + return allDbVendorsDone && !hasAssessingVendors; + }, [vendors, itemStatuses, itemsInfo, progress]); - // Define columns memoized - const memoizedColumns = React.useMemo(() => columns, []); + // Merge DB vendors with metadata vendors (pending ones) + const mergedVendors = useMemo(() => { + const dbVendorIds = new Set(vendors.map((v) => v.id)); + + // Mark vendors in DB as "assessing" if they're not_assessed and onboarding is active + // Don't mark as assessing if vendor is already assessed (resolved) + const vendorsWithStatus = vendors.map((vendor) => { + const metadataStatus = itemStatuses[vendor.id]; + // If vendor exists in DB but status is not_assessed and onboarding is active, it's being assessed + // Only mark as assessing if status is not_assessed (not assessed) + if (vendor.status === 'not_assessed' && isActive && onboardingRunId && !metadataStatus) { + return { ...vendor, isAssessing: true }; + } + return vendor; + }); + + const pendingVendors: VendorRow[] = itemsInfo + .filter((item) => { + // Only show items that are pending/processing and not yet in DB + const status = itemStatuses[item.id]; + return ( + (status === 'pending' || status === 'processing') && + !dbVendorIds.has(item.id) && + !item.id.startsWith('temp_') + ); + }) + .map((item) => { + // Create a placeholder vendor row for pending items + const status = itemStatuses[item.id]; + return { + id: item.id, + name: item.name, + description: 'Being researched and created by AI...', + category: 'other' as const, + status: 'not_assessed' as const, + inherentProbability: 'very_unlikely' as const, + inherentImpact: 'insignificant' as const, + residualProbability: 'very_unlikely' as const, + residualImpact: 'insignificant' as const, + website: null, + organizationId: orgId || '', + assigneeId: null, + assignee: null, + createdAt: new Date(), + updatedAt: new Date(), + isPending: true, + } as VendorRow; + }); + + // Also handle temp IDs (vendors being created) + const tempVendors: VendorRow[] = itemsInfo + .filter((item) => item.id.startsWith('temp_')) + .map((item) => { + const status = itemStatuses[item.id]; + return { + id: item.id, + name: item.name, + description: 'Being researched and created by AI...', + category: 'other' as const, + status: 'not_assessed' as const, + inherentProbability: 'very_unlikely' as const, + inherentImpact: 'insignificant' as const, + residualProbability: 'very_unlikely' as const, + residualImpact: 'insignificant' as const, + website: null, + organizationId: orgId || '', + assigneeId: null, + assignee: null, + createdAt: new Date(), + updatedAt: new Date(), + isPending: true, + } as VendorRow; + }); + + return [...vendorsWithStatus, ...pendingVendors, ...tempVendors]; + }, [vendors, itemsInfo, itemStatuses, orgId, isActive, onboardingRunId]); + + const columns = useMemo[]>(() => getColumns(orgId ?? 
''), [orgId]); const { table } = useDataTable({ - data: vendors, - columns: memoizedColumns, - pageCount: pageCount, + data: mergedVendors, + columns, + pageCount, getRowId: (row) => row.id, initialState: { pagination: { - pageIndex: 0, pageSize: 50, + pageIndex: 0, }, - sorting: [{ id: 'name', desc: true }], + sorting: [{ id: 'name', desc: false }], + columnPinning: { right: ['delete-vendor'] }, }, shallow: false, clearOnDefault: true, }); + const getRowProps = useMemo( + () => (vendor: VendorRow) => { + const status = itemStatuses[vendor.id] || (vendor.isPending ? 'pending' : undefined); + const isAssessing = vendor.isAssessing || status === 'assessing'; + const isBlocked = + (status && + ACTIVE_STATUSES.includes(status as 'pending' | 'processing' | 'created' | 'assessing')) || + isAssessing; + + if (!isBlocked) { + return {}; + } + + return { + disabled: true, + className: + 'relative bg-muted/40 opacity-70 pointer-events-none after:absolute after:inset-0 after:bg-background/40 after:content-[""] after:animate-pulse', + }; + }, + [itemStatuses], + ); + + // Calculate actual assessment progress + const assessmentProgress = useMemo(() => { + if (!progress || !itemsInfo.length) { + return null; + } + + // Count vendors that are completed (either 'completed' in metadata or 'assessed' in DB) + const completedCount = vendors.filter((vendor) => { + const metadataStatus = itemStatuses[vendor.id]; + return metadataStatus === 'completed' || vendor.status === 'assessed'; + }).length; + + // Also count vendors in metadata that are completed but not yet in DB + const completedInMetadata = Object.values(itemStatuses).filter( + (status) => status === 'completed', + ).length; + + // Total is the max of progress.total, itemsInfo.length, or actual vendors created + const total = Math.max(progress.total, itemsInfo.length, vendors.length); + + // Completed is the max of DB assessed vendors or metadata completed + const completed = Math.max(completedCount, completedInMetadata); + + return { total, completed }; + }, [progress, itemsInfo, vendors, itemStatuses]); + + const isEmpty = mergedVendors.length === 0; + // Show empty state if onboarding is active (even if progress metadata isn't set yet) + const showEmptyState = isEmpty && onboardingRunId && isActive; + + // Prevent flicker: if we're loading onboarding status and have a runId, render null + // Once we know the status, show animation if empty and active, otherwise show table + if (onboardingRunId && isLoading) { + return null; + } + + // Show loading animation instead of table when empty and onboarding is active + if (showEmptyState) { + return ( + <> + + + + ); + } + return ( <> - row.id} rowClickBasePath={`/${orgId}/vendors`}> - - + + row.id} + rowClickBasePath={`/${orgId}/vendors`} + getRowProps={getRowProps} + > + <> + + {isActive && !allVendorsDoneAssessing && ( +
+
+ +
+
+ + {assessmentProgress + ? assessmentProgress.completed === 0 + ? 'Researching and creating vendors' + : assessmentProgress.completed < assessmentProgress.total + ? 'Assessing vendors and generating risk assessments' + : 'Assessing vendors and generating risk assessments' + : progress + ? progress.completed === 0 + ? 'Researching and creating vendors' + : 'Assessing vendors and generating risk assessments' + : 'Researching and creating vendors'} + + + {assessmentProgress + ? assessmentProgress.completed === 0 + ? 'AI is analyzing your organization...' + : `${assessmentProgress.completed}/${assessmentProgress.total} vendors assessed` + : progress + ? progress.completed === 0 + ? 'AI is analyzing your organization...' + : `${progress.completed}/${progress.total} vendors created` + : 'AI is analyzing your organization...'} + +
+
+ )} + +
+
); diff --git a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/vendor-onboarding-context.tsx b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/vendor-onboarding-context.tsx new file mode 100644 index 000000000..7afeb6897 --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/components/vendor-onboarding-context.tsx @@ -0,0 +1,35 @@ +'use client'; + +import { createContext, useContext } from 'react'; +import type { OnboardingItemStatus } from '../../../risk/(overview)/hooks/use-onboarding-status'; + +export type VendorOnboardingStatus = Record; + +interface VendorOnboardingContextValue { + statuses: VendorOnboardingStatus; +} + +const VendorOnboardingContext = createContext(undefined); + +export function VendorOnboardingProvider({ + children, + statuses, +}: { + children: React.ReactNode; + statuses: VendorOnboardingStatus; +}) { + return ( + + {children} + + ); +} + +export function useVendorOnboardingStatus() { + const context = useContext(VendorOnboardingContext); + if (!context) { + return {}; + } + return context.statuses; +} + diff --git a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/hooks/use-onboarding-status.ts b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/hooks/use-onboarding-status.ts new file mode 100644 index 000000000..a7c39322f --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/hooks/use-onboarding-status.ts @@ -0,0 +1,4 @@ +'use client'; + +export { useOnboardingStatus } from '../../../risk/(overview)/hooks/use-onboarding-status'; + diff --git a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/page.tsx b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/page.tsx index 017547cee..fd7e4116a 100644 --- a/apps/app/src/app/(app)/[orgId]/vendors/(overview)/page.tsx +++ b/apps/app/src/app/(app)/[orgId]/vendors/(overview)/page.tsx @@ -1,6 +1,7 @@ import { AppOnboarding } from '@/components/app-onboarding'; import PageWithBreadcrumb from '@/components/pages/PageWithBreadcrumb'; import type { SearchParams } from '@/types'; +import { db } from '@db'; import type { Metadata } from 'next'; import { CreateVendorSheet } from '../components/create-vendor-sheet'; import { VendorsTable } from './components/VendorsTable'; @@ -19,9 +20,13 @@ export default async function Page({ const parsedSearchParams = await vendorsSearchParamsCache.parse(searchParams); - const [vendorsResult, assignees] = await Promise.all([ + const [vendorsResult, assignees, onboarding] = await Promise.all([ getVendors(orgId, parsedSearchParams), getAssignees(orgId), + db.onboarding.findFirst({ + where: { organizationId: orgId }, + select: { triggerJobId: true }, + }), ]); // Helper function to check if the current view is the default, unfiltered one @@ -36,8 +41,12 @@ export default async function Page({ ); } - // Show onboarding only if the view is default/unfiltered and there's no data - if (vendorsResult.data.length === 0 && isDefaultView(parsedSearchParams)) { + const isEmpty = vendorsResult.data.length === 0; + const isDefault = isDefaultView(parsedSearchParams); + const isOnboardingActive = Boolean(onboarding?.triggerJobId); + + // Show AppOnboarding only if empty, default view, AND onboarding is not active + if (isEmpty && isDefault && !isOnboardingActive) { return (
); diff --git a/apps/app/src/app/(app)/[orgId]/vendors/[vendorId]/actions/regenerate-vendor-mitigation.ts b/apps/app/src/app/(app)/[orgId]/vendors/[vendorId]/actions/regenerate-vendor-mitigation.ts index 1d01d08e3..eb21d3d0e 100644 --- a/apps/app/src/app/(app)/[orgId]/vendors/[vendorId]/actions/regenerate-vendor-mitigation.ts +++ b/apps/app/src/app/(app)/[orgId]/vendors/[vendorId]/actions/regenerate-vendor-mitigation.ts @@ -2,6 +2,11 @@ import { authActionClient } from '@/actions/safe-action'; import { generateVendorMitigation } from '@/jobs/tasks/onboarding/generate-vendor-mitigation'; +import { + findCommentAuthor, + type PolicyContext, +} from '@/jobs/tasks/onboarding/onboard-organization-helpers'; +import { db } from '@db'; import { tasks } from '@trigger.dev/sdk'; import { z } from 'zod'; @@ -26,9 +31,30 @@ export const regenerateVendorMitigationAction = authActionClient throw new Error('No active organization'); } + const organizationId = session.activeOrganizationId; + + const [author, policyRows] = await Promise.all([ + findCommentAuthor(organizationId), + db.policy.findMany({ + where: { organizationId }, + select: { name: true, description: true }, + }), + ]); + + if (!author) { + throw new Error('No eligible author found to regenerate the mitigation'); + } + + const policies: PolicyContext[] = policyRows.map((policy) => ({ + name: policy.name, + description: policy.description, + })); + await tasks.trigger('generate-vendor-mitigation', { - organizationId: session.activeOrganizationId, + organizationId, vendorId, + authorId: author.id, + policies, }); return { success: true }; diff --git a/apps/app/src/components/data-table/data-table.tsx b/apps/app/src/components/data-table/data-table.tsx index 642cf56ad..1899bd5a8 100644 --- a/apps/app/src/components/data-table/data-table.tsx +++ b/apps/app/src/components/data-table/data-table.tsx @@ -14,6 +14,7 @@ interface DataTableProps extends React.ComponentProps<'div'> { rowClickBasePath?: string; tableId?: string; onRowClick?: (row: TData) => void; + getRowProps?: (row: TData) => { disabled?: boolean; className?: string }; } export function DataTable({ @@ -25,6 +26,7 @@ export function DataTable({ rowClickBasePath, tableId, onRowClick, + getRowProps, ...props }: DataTableProps) { const router = useRouter(); @@ -45,8 +47,8 @@ export function DataTable({ return (
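The new optional getRowProps prop turns per-row disabling and styling into a callback supplied by the consumer. A hedged usage sketch of how a caller such as VendorsTable can wire it up (the row shape and class string here are illustrative stand-ins, not the exact code from the diff):

// Stand-in for the consumer's row type (VendorsTable uses VendorRow).
type RowLike = { isPending?: boolean; isAssessing?: boolean };

// Rows that are still being created or assessed are flagged as disabled and dimmed.
const getRowProps = (row: RowLike) =>
  row.isPending || row.isAssessing
    ? { disabled: true, className: 'pointer-events-none opacity-60' }
    : {};

// Passed alongside the existing DataTable props, e.g.:
// <DataTable table={table} getRowProps={getRowProps} rowClickBasePath={`/${orgId}/vendors`}>...</DataTable>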
{children} -
- +
+
{table.getHeaderGroups().map((headerGroup) => ( @@ -74,33 +76,47 @@ export function DataTable({ {filteredRows.length ? ( - filteredRows.map((row) => ( - handleRowClick(row.original) : undefined} - > - {row.getVisibleCells().map((cell, index) => { - return ( - - {flexRender(cell.column.columnDef.cell, cell.getContext())} - - ); - })} - - )) + filteredRows.map((row) => { + const customRowProps = getRowProps?.(row.original); + const isDisabled = Boolean(customRowProps?.disabled); + const rowClassName = cn( + isRowClickable && 'hover:bg-muted/50 cursor-pointer', + isDisabled && 'pointer-events-none cursor-not-allowed opacity-60', + customRowProps?.className, + ); + + return ( + handleRowClick(row.original) : undefined + } + > + {row.getVisibleCells().map((cell, index) => { + return ( + + {flexRender(cell.column.columnDef.cell, cell.getContext())} + + ); + })} + + ); + }) ) : ( + {/* Main Animation Container */} +
+ {/* Background Grid Pattern */} +
+
+ {Array.from({ length: 16 }).map((_, i) => ( +
+ ))} +
+
+ + {/* Floating Item Cards Animation */} +
+ {[0, 1, 2, 3].map((index) => ( + + {/* Item Card */} +
+
+ {/* Icon */} + + + + + {/* Content Skeleton */} +
+
+
+
+ + {/* Sparkle Effect */} + + + +
+
+ + {/* Arrow/Connection */} + {index < 3 && ( + + )} + + ))} +
+ + {/* Central AI Processing Indicator */} + +
+
+
+ +
+
+ +
+ + {/* Text Content */} + +

{title}

+

{description}

+
+
+ ); +} + diff --git a/apps/app/src/env.mjs b/apps/app/src/env.mjs index 17d65a68a..25e1e707d 100644 --- a/apps/app/src/env.mjs +++ b/apps/app/src/env.mjs @@ -10,6 +10,7 @@ export const env = createEnv({ AUTH_SECRET: z.string(), DATABASE_URL: z.string().min(1), OPENAI_API_KEY: z.string().optional(), + GROQ_API_KEY: z.string().optional(), RESEND_API_KEY: z.string(), UPSTASH_REDIS_REST_URL: z.string().optional(), UPSTASH_REDIS_REST_TOKEN: z.string().optional(), @@ -62,6 +63,7 @@ export const env = createEnv({ AUTH_SECRET: process.env.AUTH_SECRET, DATABASE_URL: process.env.DATABASE_URL, OPENAI_API_KEY: process.env.OPENAI_API_KEY, + GROQ_API_KEY: process.env.GROQ_API_KEY, RESEND_API_KEY: process.env.RESEND_API_KEY, UPSTASH_REDIS_REST_URL: process.env.UPSTASH_REDIS_REST_URL, UPSTASH_REDIS_REST_TOKEN: process.env.UPSTASH_REDIS_REST_TOKEN, diff --git a/apps/app/src/jobs/tasks/onboarding/generate-full-policies.ts b/apps/app/src/jobs/tasks/onboarding/generate-full-policies.ts index 04caad31b..6d45810b7 100644 --- a/apps/app/src/jobs/tasks/onboarding/generate-full-policies.ts +++ b/apps/app/src/jobs/tasks/onboarding/generate-full-policies.ts @@ -5,7 +5,7 @@ import { getOrganizationContext, triggerPolicyUpdates } from './onboard-organiza // v4 queues must be declared in advance const generateFullPoliciesQueue = queue({ name: 'generate-full-policies', - concurrencyLimit: 100, + concurrencyLimit: 50, }); export const generateFullPolicies = task({ diff --git a/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts b/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts index d719ebeb1..fc26e8213 100644 --- a/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts +++ b/apps/app/src/jobs/tasks/onboarding/generate-risk-mitigation.ts @@ -1,5 +1,5 @@ import { RiskStatus, db } from '@db'; -import { logger, queue, task } from '@trigger.dev/sdk'; +import { logger, metadata, queue, task } from '@trigger.dev/sdk'; import axios from 'axios'; import { createRiskMitigationComment, @@ -8,8 +8,8 @@ import { } from './onboard-organization-helpers'; // Queues -const riskMitigationQueue = queue({ name: 'risk-mitigations', concurrencyLimit: 100 }); -const riskMitigationFanoutQueue = queue({ name: 'risk-mitigations-fanout', concurrencyLimit: 100 }); +const riskMitigationQueue = queue({ name: 'risk-mitigations', concurrencyLimit: 50 }); +const riskMitigationFanoutQueue = queue({ name: 'risk-mitigations-fanout', concurrencyLimit: 50 }); export const generateRiskMitigation = task({ id: 'generate-risk-mitigation', @@ -17,39 +17,45 @@ export const generateRiskMitigation = task({ retry: { maxAttempts: 5, }, - run: async (payload: { organizationId: string; riskId: string }) => { - const { organizationId, riskId } = payload; + run: async (payload: { + organizationId: string; + riskId: string; + authorId: string; + policies: PolicyContext[]; + }) => { + const { organizationId, riskId, authorId, policies } = payload; logger.info(`Generating risk mitigation for risk ${riskId} in org ${organizationId}`); - const [risk, policies, author] = await Promise.all([ - db.risk.findFirst({ where: { id: riskId, organizationId } }), - db.policy.findMany({ where: { organizationId }, select: { name: true, description: true } }), - findCommentAuthor(organizationId), - ]); + const risk = await db.risk.findFirst({ where: { id: riskId, organizationId } }); if (!risk) { logger.warn(`Risk ${riskId} not found in org ${organizationId}`); return; } - if (!author) { - logger.warn( - `No eligible author found for org 
${organizationId}; skipping mitigation for risk ${riskId}`, - ); - return; - } + // Mark as processing before generating mitigation + // Update root onboarding task metadata if available (when triggered from onboarding) + // Try root first (onboarding task), then parent (fanout task), then own metadata + const metadataHandle = metadata.root ?? metadata.parent ?? metadata; + metadataHandle.set(`risk_${riskId}_status`, 'processing'); - await createRiskMitigationComment(risk, policies as PolicyContext[], organizationId, author.id); + await createRiskMitigationComment(risk, policies, organizationId, authorId); // Mark risk as closed and assign to owner/admin await db.risk.update({ where: { id: risk.id, organizationId }, data: { status: RiskStatus.closed, - assigneeId: author.id, + assigneeId: authorId, }, }); + // Mark as completed after mitigation is done + // Update root onboarding task metadata if available + metadataHandle.set(`risk_${riskId}_status`, 'completed'); + metadataHandle.increment('risksCompleted', 1); + metadataHandle.decrement('risksRemaining', 1); + // Revalidate only the risk detail page in the individual job try { const detailPath = `/${organizationId}/risk/${riskId}`; @@ -81,15 +87,37 @@ export const generateRiskMitigationsForOrg = task({ const { organizationId } = payload; logger.info(`Fan-out risk mitigations for org ${organizationId}`); - const risks = await db.risk.findMany({ where: { organizationId } }); + const [risks, policyRows, author] = await Promise.all([ + db.risk.findMany({ where: { organizationId } }), + db.policy.findMany({ + where: { organizationId }, + select: { name: true, description: true }, + }), + findCommentAuthor(organizationId), + ]); + if (risks.length === 0) { logger.info(`No risks found for org ${organizationId}`); return; } + if (!author) { + logger.warn( + `No onboarding author found for org ${organizationId}; skipping risk mitigations`, + ); + return; + } + + const policies = policyRows.map((p) => ({ name: p.name, description: p.description })); + await generateRiskMitigation.batchTrigger( risks.map((r) => ({ - payload: { organizationId, riskId: r.id }, + payload: { + organizationId, + riskId: r.id, + authorId: author.id, + policies, + }, concurrencyKey: `${organizationId}:${r.id}`, })), ); diff --git a/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts b/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts index 134f18691..e1aa5776b 100644 --- a/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts +++ b/apps/app/src/jobs/tasks/onboarding/generate-vendor-mitigation.ts @@ -1,5 +1,5 @@ import { VendorStatus, db } from '@db'; -import { logger, queue, task } from '@trigger.dev/sdk'; +import { logger, metadata, queue, task } from '@trigger.dev/sdk'; import axios from 'axios'; import { createVendorRiskComment, @@ -8,10 +8,10 @@ import { } from './onboard-organization-helpers'; // Queues -const vendorMitigationQueue = queue({ name: 'vendor-risk-mitigations', concurrencyLimit: 100 }); +const vendorMitigationQueue = queue({ name: 'vendor-risk-mitigations', concurrencyLimit: 50 }); const vendorMitigationFanoutQueue = queue({ name: 'vendor-risk-mitigations-fanout', - concurrencyLimit: 100, + concurrencyLimit: 50, }); export const generateVendorMitigation = task({ @@ -20,39 +20,45 @@ export const generateVendorMitigation = task({ retry: { maxAttempts: 5, }, - run: async (payload: { organizationId: string; vendorId: string }) => { - const { organizationId, vendorId } = payload; + run: async (payload: { + 
organizationId: string; + vendorId: string; + authorId: string; + policies: PolicyContext[]; + }) => { + const { organizationId, vendorId, authorId, policies } = payload; logger.info(`Generating vendor mitigation for vendor ${vendorId} in org ${organizationId}`); - const [vendor, policies, author] = await Promise.all([ - db.vendor.findFirst({ where: { id: vendorId, organizationId } }), - db.policy.findMany({ where: { organizationId }, select: { name: true, description: true } }), - findCommentAuthor(organizationId), - ]); + const vendor = await db.vendor.findFirst({ where: { id: vendorId, organizationId } }); if (!vendor) { logger.warn(`Vendor ${vendorId} not found in org ${organizationId}`); return; } - if (!author) { - logger.warn( - `No eligible author found for org ${organizationId}; skipping mitigation for vendor ${vendorId}`, - ); - return; - } + // Mark as processing before generating mitigation + // Update root onboarding task metadata if available (when triggered from onboarding) + // Try root first (onboarding task), then parent (fanout task), then own metadata + const metadataHandle = metadata.root ?? metadata.parent ?? metadata; + metadataHandle.set(`vendor_${vendorId}_status`, 'processing'); - await createVendorRiskComment(vendor, policies as PolicyContext[], organizationId, author.id); + await createVendorRiskComment(vendor, policies, organizationId, authorId); // Mark vendor as assessed and assign to owner/admin await db.vendor.update({ where: { id: vendor.id, organizationId }, data: { status: VendorStatus.assessed, - assigneeId: author.id, + assigneeId: authorId, }, }); + // Mark as completed after mitigation is done + // Update root onboarding task metadata if available + metadataHandle.set(`vendor_${vendorId}_status`, 'completed'); + metadataHandle.increment('vendorsCompleted', 1); + metadataHandle.decrement('vendorsRemaining', 1); + // Revalidate the vendor detail page so the new comment shows up try { const detailPath = `/${organizationId}/vendors/${vendorId}`; @@ -74,15 +80,37 @@ export const generateVendorMitigationsForOrg = task({ const { organizationId } = payload; logger.info(`Fan-out vendor mitigations for org ${organizationId}`); - const vendors = await db.vendor.findMany({ where: { organizationId } }); + const [vendors, policyRows, author] = await Promise.all([ + db.vendor.findMany({ where: { organizationId } }), + db.policy.findMany({ + where: { organizationId }, + select: { name: true, description: true }, + }), + findCommentAuthor(organizationId), + ]); + if (vendors.length === 0) { logger.info(`No vendors found for org ${organizationId}`); return; } + if (!author) { + logger.warn( + `No onboarding author found for org ${organizationId}; skipping vendor mitigations`, + ); + return; + } + + const policies = policyRows.map((p) => ({ name: p.name, description: p.description })); + await generateVendorMitigation.batchTrigger( vendors.map((v) => ({ - payload: { organizationId, vendorId: v.id }, + payload: { + organizationId, + vendorId: v.id, + authorId: author.id, + policies, + }, concurrencyKey: `${organizationId}:${v.id}`, })), ); diff --git a/apps/app/src/jobs/tasks/onboarding/onboard-organization-helpers.ts b/apps/app/src/jobs/tasks/onboarding/onboard-organization-helpers.ts index 28dd8e52a..d010b7f1c 100644 --- a/apps/app/src/jobs/tasks/onboarding/onboard-organization-helpers.ts +++ b/apps/app/src/jobs/tasks/onboarding/onboard-organization-helpers.ts @@ -53,6 +53,14 @@ export type RiskData = { department: Departments; }; +type OrganizationRecord = 
NonNullable>>; + +type OrganizationContextResult = { + organization: OrganizationRecord; + questionsAndAnswers: ContextItem[]; + policies: { id: string; name: string; description: string | null }[]; +}; + // Baseline risks that must always exist for every organization regardless of frameworks const BASELINE_RISKS: Array<{ title: string; @@ -134,7 +142,9 @@ export async function revalidateOrganizationPath(organizationId: string): Promis /** * Fetches organization data and context */ -export async function getOrganizationContext(organizationId: string) { +export async function getOrganizationContext( + organizationId: string, +): Promise { const [organization, contextHub, policies] = await Promise.all([ db.organization.findUnique({ where: { id: organizationId }, @@ -144,7 +154,7 @@ export async function getOrganizationContext(organizationId: string) { }), db.policy.findMany({ where: { organizationId }, - select: { name: true, description: true }, + select: { id: true, name: true, description: true }, }), ]); @@ -157,7 +167,13 @@ export async function getOrganizationContext(organizationId: string) { answer: context.answer, })); - return { organization, questionsAndAnswers, policies }; + const typedPolicies = policies as Array<{ + id: string; + name: string; + description: string | null; + }>; + + return { organization, questionsAndAnswers, policies: typedPolicies }; } /** @@ -272,9 +288,13 @@ export async function createVendorsFromData( vendorData: VendorData[], organizationId: string, ): Promise { - const createdVendors = []; + // Mark all vendors as processing before creation + vendorData.forEach((_, index) => { + metadata.set(`vendor_temp_${index}_status`, 'processing'); + }); - for (const vendor of vendorData) { + // Check for existing vendors and create new ones concurrently + const vendorPromises = vendorData.map(async (vendor, index) => { const existingVendor = await db.vendor.findMany({ where: { organizationId, @@ -284,7 +304,10 @@ export async function createVendorsFromData( if (existingVendor.length > 0) { logger.info(`Vendor ${vendor.vendor_name} already exists`); - continue; + // Mark as completed if it already exists + const existing = existingVendor[0]; + metadata.set(`vendor_${existing.id}_status`, 'completed'); + return existing; } const createdVendor = await db.vendor.create({ @@ -301,9 +324,24 @@ export async function createVendorsFromData( }, }); - createdVendors.push(createdVendor); logger.info(`Created vendor: ${createdVendor.id} (${createdVendor.name})`); - } + return createdVendor; + }); + + const createdVendors = await Promise.all(vendorPromises); + + // Update metadata with all real IDs and mark as created (will be marked as assessing after all are created) + createdVendors.forEach((vendor) => { + const status = metadata.get(`vendor_${vendor.id}_status`); + if (status === 'completed') { + // Already marked as completed (existing vendor) + return; + } + // New vendor, mark as created + metadata.set(`vendor_${vendor.id}_status`, 'created'); + }); + + // Note: vendorsCompleted is incremented when mitigation is generated, not when created return createdVendors; } @@ -469,15 +507,20 @@ export async function getExistingRisks(organizationId: string) { } /** - * Creates risks from extracted data + * Creates risks from extracted data (AI-generated risks only) */ export async function createRisksFromData( riskData: RiskData[], organizationId: string, ): Promise { - const createdRisks: Risk[] = []; - for (const risk of riskData) { - const createdRisk = await db.risk.create({ + 
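The per-item progress tracking written by these helpers and the mitigation tasks is a naming convention over Trigger.dev run metadata rather than a shared utility. A minimal sketch consolidating that convention in one place; the helper names are hypothetical, but the key shapes, status values, and root/parent fallback mirror what the diff does:

import { metadata } from '@trigger.dev/sdk';

type TrackedKind = 'risk' | 'vendor' | 'policy';
type TrackedStatus = 'pending' | 'queued' | 'processing' | 'created' | 'assessing' | 'completed';

// Child tasks report into the root onboarding run when it exists, then the parent
// (fan-out) run, then their own metadata — the same fallback used in the tasks above.
function statusTarget() {
  return metadata.root ?? metadata.parent ?? metadata;
}

// Each tracked item is keyed as `<kind>_<id>_status`, e.g. `risk_<riskId>_status`;
// items not yet persisted use `<kind>_temp_<index>_status` placeholders.
function setItemStatus(kind: TrackedKind, id: string, status: TrackedStatus) {
  statusTarget().set(`${kind}_${id}_status`, status);
}

// Completion also moves the aggregate counters the UI polls
// (`risksCompleted` / `risksRemaining`, `vendorsCompleted` / `vendorsRemaining`, ...).
function markItemCompleted(kind: TrackedKind, id: string) {
  const target = statusTarget();
  target.set(`${kind}_${id}_status`, 'completed');
  target.increment(`${kind}sCompleted`, 1);
  target.decrement(`${kind}sRemaining`, 1);
}

// Roughly what generateRiskMitigation does around the mitigation call:
//   setItemStatus('risk', riskId, 'processing');
//   ...createRiskMitigationComment(...)...
//   markItemCompleted('risk', riskId);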
// Mark all risks as processing before creation + riskData.forEach((_, index) => { + metadata.set(`risk_temp_${index}_status`, 'processing'); + }); + + // Create all risks concurrently + const createPromises = riskData.map((risk) => + db.risk.create({ data: { title: risk.risk_name, description: risk.risk_description, @@ -489,16 +532,84 @@ export async function createRisksFromData( treatmentStrategyDescription: risk.risk_treatment_strategy_description, organizationId, }, - }); + }), + ); - createdRisks.push(createdRisk); + const createdRisks = await Promise.all(createPromises); + + // Update metadata with all real IDs and mark as created (will be marked as assessing after all are created) + createdRisks.forEach((createdRisk) => { + metadata.set(`risk_${createdRisk.id}_status`, 'created'); logger.info(`Created risk: ${createdRisk.id} (${createdRisk.title})`); - } + }); + + // Note: risksCompleted is incremented when mitigation is generated, not when created logger.info(`Created ${riskData.length} risks`); return createdRisks; } +/** + * Creates risks from combined baseline and AI-generated data + */ +async function createRisksFromDataWithBaseline( + allRisksToCreate: Array<{ + isBaseline: boolean; + baselineData: (typeof BASELINE_RISKS)[0] | null; + riskData: RiskData | null; + }>, + organizationId: string, +): Promise { + // Mark all risks as processing before creation + allRisksToCreate.forEach((_, index) => { + metadata.set(`risk_temp_${index}_status`, 'processing'); + }); + + // Create all risks concurrently (baseline + AI-generated) + const createPromises = allRisksToCreate.map((risk) => { + if (risk.isBaseline && risk.baselineData) { + return db.risk.create({ + data: { + title: risk.baselineData.title, + description: risk.baselineData.description, + category: risk.baselineData.category, + department: risk.baselineData.department, + status: risk.baselineData.status, + organizationId, + }, + }); + } else if (risk.riskData) { + return db.risk.create({ + data: { + title: risk.riskData.risk_name, + description: risk.riskData.risk_description, + category: risk.riskData.category, + department: risk.riskData.department, + likelihood: risk.riskData.risk_residual_probability, + impact: risk.riskData.risk_residual_impact, + treatmentStrategy: risk.riskData.risk_treatment_strategy, + treatmentStrategyDescription: risk.riskData.risk_treatment_strategy_description, + organizationId, + }, + }); + } + throw new Error('Invalid risk data'); + }); + + const createdRisks = await Promise.all(createPromises); + + // Update metadata with all real IDs and mark as created (will be marked as assessing after all are created) + createdRisks.forEach((createdRisk) => { + metadata.set(`risk_${createdRisk.id}_status`, 'created'); + logger.info(`Created risk: ${createdRisk.id} (${createdRisk.title})`); + }); + + // Note: risksCompleted is incremented when mitigation is generated, not when created + + logger.info(`Created ${allRisksToCreate.length} risks (including baseline)`); + return createdRisks; +} + /** * Gets all policies for an organization */ @@ -524,12 +635,15 @@ export async function triggerPolicyUpdates( metadata.set('policiesCompleted', 0); metadata.set('policiesRemaining', policies.length); // Store policy info for tracking individual policies - metadata.set('policiesInfo', policies.map((p) => ({ id: p.id, name: p.name }))); - - // Initialize individual policy statuses - all start as 'pending' + metadata.set( + 'policiesInfo', + policies.map((p) => ({ id: p.id, name: p.name })), + ); + + // Initialize 
individual policy statuses - all start as 'queued' // Each policy gets its own metadata key: policy_{id}_status policies.forEach((policy) => { - metadata.set(`policy_${policy.id}_status`, 'pending'); + metadata.set(`policy_${policy.id}_status`, 'queued'); }); await updatePolicy.batchTriggerAndWait( @@ -557,7 +671,7 @@ export async function createVendors( vendorData?: VendorData[], ): Promise { // Extract vendors using AI if not provided - const vendorsToCreate = vendorData || await extractVendorsFromContext(questionsAndAnswers); + const vendorsToCreate = vendorData || (await extractVendorsFromContext(questionsAndAnswers)); // Create vendor records in database const createdVendors = await createVendorsFromData(vendorsToCreate, organizationId); @@ -591,11 +705,11 @@ export async function createRisks( organizationId: string, organizationName: string, ): Promise { - // Ensure baseline risks exist first so the AI doesn't recreate them - await ensureBaselineRisks(organizationId); - - // Get existing risks to avoid duplicates (includes baseline) + // Check if baseline risks need to be created (but don't create them yet) const existingRisks = await getExistingRisks(organizationId); + const baselineRisksToCreate = BASELINE_RISKS.filter( + (base) => !existingRisks.some((r) => r.title === base.title), + ); // Extract risks using AI const riskData = await extractRisksFromContext( @@ -604,9 +718,51 @@ export async function createRisks( existingRisks, ); - // Create risk records in database - const risks = await createRisksFromData(riskData, organizationId); - return risks; + // Combine baseline risks and AI-generated risks for tracking + const allRisksToCreate = [ + ...baselineRisksToCreate.map((base) => ({ + isBaseline: true, + baselineData: base, + riskData: null as RiskData | null, + })), + ...riskData.map((risk) => ({ + isBaseline: false, + baselineData: null as (typeof BASELINE_RISKS)[0] | null, + riskData: risk, + })), + ]; + + // Track all risks immediately as "pending" before creation + if (allRisksToCreate.length > 0) { + metadata.set('risksTotal', allRisksToCreate.length); + metadata.set('risksCompleted', 0); + metadata.set('risksRemaining', allRisksToCreate.length); + // Use temporary IDs based on index until we have real IDs + metadata.set( + 'risksInfo', + allRisksToCreate.map((r, index) => ({ + id: `temp_${index}`, + name: r.isBaseline ? 
r.baselineData!.title : r.riskData!.risk_name, + })), + ); + // Mark all as pending initially + allRisksToCreate.forEach((_, index) => { + metadata.set(`risk_temp_${index}_status`, 'pending'); + }); + } + + // Create all risks together (baseline + AI-generated) in one batch + const createdRisks = await createRisksFromDataWithBaseline(allRisksToCreate, organizationId); + + // Update tracking with real risk IDs + if (createdRisks.length > 0) { + metadata.set( + 'risksInfo', + createdRisks.map((r) => ({ id: r.id, name: r.title })), + ); + } + + return createdRisks; } /** diff --git a/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts b/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts index f7c8935bd..c7bc12638 100644 --- a/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts +++ b/apps/app/src/jobs/tasks/onboarding/onboard-organization.ts @@ -12,7 +12,7 @@ import { } from './onboard-organization-helpers'; // v4 queues must be declared in advance -const onboardOrgQueue = queue({ name: 'onboard-organization', concurrencyLimit: 100 }); +const onboardOrgQueue = queue({ name: 'onboard-organization', concurrencyLimit: 50 }); export const onboardOrganization = task({ id: 'onboard-organization', @@ -31,9 +31,32 @@ export const onboardOrganization = task({ try { // Get organization context - const { organization, questionsAndAnswers, policies } = await getOrganizationContext( + const { + organization, + questionsAndAnswers, + policies, + }: Awaited> = await getOrganizationContext( payload.organizationId, ); + const policyList = policies ?? []; + // Initialize policy metadata immediately so UI can reflect pending status + if (policyList.length > 0) { + metadata.set('policiesTotal', policyList.length); + metadata.set('policiesCompleted', 0); + metadata.set('policiesRemaining', policyList.length); + metadata.set( + 'policiesInfo', + policyList.map((policy) => ({ id: policy.id, name: policy.name })), + ); + policyList.forEach((policy) => { + metadata.set(`policy_${policy.id}_status`, 'queued'); + }); + } else { + metadata.set('policiesTotal', 0); + metadata.set('policiesCompleted', 0); + metadata.set('policiesRemaining', 0); + metadata.set('policiesInfo', []); + } const frameworkInstances = await db.frameworkInstance.findMany({ where: { @@ -114,19 +137,18 @@ export const onboardOrganization = task({ } // Create vendors (pass extracted data to avoid re-extraction) + // Tracking is handled inside createVendors -> createVendorsFromData const vendors = await createVendors(questionsAndAnswers, payload.organizationId, vendorData); - // Update tracking with real vendor IDs and mark as completed + // Update tracking with real vendor IDs (tracking during creation uses temp IDs) if (vendors.length > 0) { - metadata.set('vendorsCompleted', vendors.length); - metadata.set('vendorsRemaining', 0); metadata.set( 'vendorsInfo', vendors.map((v) => ({ id: v.id, name: v.name })), ); - // Mark all as completed + // Mark all created vendors as "assessing" since they need mitigation vendors.forEach((vendor) => { - metadata.set(`vendor_${vendor.id}_status`, 'completed'); + metadata.set(`vendor_${vendor.id}_status`, 'assessing'); }); } @@ -142,25 +164,17 @@ export const onboardOrganization = task({ }, ); - // Create risks + // Create risks (tracking is handled inside createRisks) const risks = await createRisks( questionsAndAnswers, payload.organizationId, organization.name, ); - // Track risks with metadata for real-time tracking + // Mark all created risks as "assessing" since they need mitigation if 
(risks.length > 0) { - metadata.set('risksTotal', risks.length); - metadata.set('risksCompleted', risks.length); - metadata.set('risksRemaining', 0); - metadata.set( - 'risksInfo', - risks.map((r) => ({ id: r.id, name: r.title })), - ); - // All risks are created immediately, so mark them all as completed risks.forEach((risk) => { - metadata.set(`risk_${risk.id}_status`, 'completed'); + metadata.set(`risk_${risk.id}_status`, 'assessing'); }); } @@ -168,9 +182,7 @@ export const onboardOrganization = task({ metadata.set('risk', true); // Get policy count for the step message - const policyCount = await db.policy.count({ - where: { organizationId: payload.organizationId }, - }); + const policyCount = policyList.length; metadata.set('currentStep', `Tailoring Policies... (0/${policyCount})`); // Fan-out risk mitigations as separate jobs diff --git a/apps/app/src/jobs/tasks/onboarding/update-policy.ts b/apps/app/src/jobs/tasks/onboarding/update-policy.ts index 97755c551..8cf2ea475 100644 --- a/apps/app/src/jobs/tasks/onboarding/update-policy.ts +++ b/apps/app/src/jobs/tasks/onboarding/update-policy.ts @@ -7,7 +7,7 @@ if (!process.env.OPENAI_API_KEY) { } // v4: define queue ahead of time -export const updatePolicyQueue = queue({ name: 'update-policy', concurrencyLimit: 100 }); +export const updatePolicyQueue = queue({ name: 'update-policy', concurrencyLimit: 50 }); export const updatePolicy = schemaTask({ id: 'update-policy', @@ -48,10 +48,10 @@ export const updatePolicy = schemaTask({ if (metadata.parent) { // Update this policy's status to completed using individual key metadata.parent.set(`policy_${params.policyId}_status`, 'completed'); - + // Increment completed count metadata.parent.increment('policiesCompleted', 1); - + // Decrement remaining count metadata.parent.increment('policiesRemaining', -1); } diff --git a/apps/app/tsconfig.json b/apps/app/tsconfig.json index 730f9b0ef..13886308c 100644 --- a/apps/app/tsconfig.json +++ b/apps/app/tsconfig.json @@ -1,7 +1,11 @@ { "compilerOptions": { "target": "ES2017", - "lib": ["dom", "dom.iterable", "esnext"], + "lib": [ + "dom", + "dom.iterable", + "esnext" + ], "allowJs": true, "skipLibCheck": true, "strict": true, @@ -11,7 +15,7 @@ "moduleResolution": "bundler", "resolveJsonModule": true, "isolatedModules": true, - "jsx": "preserve", + "jsx": "react-jsx", "incremental": true, "plugins": [ { @@ -19,36 +23,102 @@ } ], "paths": { - "@/*": ["./src/*"], - "@db": ["./prisma"], - "@/jobs": ["./src/jobs"], - "@/jobs/*": ["./src/jobs/*"], - "@comp/email": ["../../packages/email/index.ts"], - "@comp/email/*": ["../../packages/email/*"], - "@comp/utils": ["../../packages/utils/src/index.ts"], - "@comp/utils/*": ["../../packages/utils/src/*"], - "@comp/integrations": ["../../packages/integrations/src/index.ts"], - "@comp/integrations/*": ["../../packages/integrations/src/*"], - "@comp/analytics": ["../../packages/analytics/src/index.ts"], - "@comp/analytics/*": ["../../packages/analytics/src/*"], - "@comp/ui": ["../../packages/ui/src/components/index.ts"], - "@comp/ui/*": ["../../packages/ui/src/components/*"], - "@comp/ui/hooks": ["../../packages/ui/src/hooks/index.ts"], - "@comp/ui/hooks/*": ["../../packages/ui/src/hooks/*"], - "@comp/ui/utils/*": ["../../packages/ui/src/utils/*"], - "@comp/ui/cn": ["../../packages/ui/src/utils/cn.ts"], - "@comp/ui/truncate": ["../../packages/ui/src/utils/truncate.ts"], - "@comp/ui/globals.css": ["../../packages/ui/src/globals.css"], - "@comp/ui/editor.css": ["../../packages/ui/src/editor.css"], - 
"@comp/ui/tailwind.config": ["../../packages/ui/tailwind.config.ts"], - "@comp/kv": ["../../packages/kv/src/index.ts"], - "@comp/kv/*": ["../../packages/kv/src/*"], - "@comp/tsconfig": ["../../packages/tsconfig/index.ts"], - "@comp/tsconfig/*": ["../../packages/tsconfig/*"], - "@trycompai/email": ["../../packages/email/index.ts"], - "@trycompai/email/*": ["../../packages/email/*"] + "@/*": [ + "./src/*" + ], + "@db": [ + "./prisma" + ], + "@/jobs": [ + "./src/jobs" + ], + "@/jobs/*": [ + "./src/jobs/*" + ], + "@comp/email": [ + "../../packages/email/index.ts" + ], + "@comp/email/*": [ + "../../packages/email/*" + ], + "@comp/utils": [ + "../../packages/utils/src/index.ts" + ], + "@comp/utils/*": [ + "../../packages/utils/src/*" + ], + "@comp/integrations": [ + "../../packages/integrations/src/index.ts" + ], + "@comp/integrations/*": [ + "../../packages/integrations/src/*" + ], + "@comp/analytics": [ + "../../packages/analytics/src/index.ts" + ], + "@comp/analytics/*": [ + "../../packages/analytics/src/*" + ], + "@comp/ui": [ + "../../packages/ui/src/components/index.ts" + ], + "@comp/ui/*": [ + "../../packages/ui/src/components/*" + ], + "@comp/ui/hooks": [ + "../../packages/ui/src/hooks/index.ts" + ], + "@comp/ui/hooks/*": [ + "../../packages/ui/src/hooks/*" + ], + "@comp/ui/utils/*": [ + "../../packages/ui/src/utils/*" + ], + "@comp/ui/cn": [ + "../../packages/ui/src/utils/cn.ts" + ], + "@comp/ui/truncate": [ + "../../packages/ui/src/utils/truncate.ts" + ], + "@comp/ui/globals.css": [ + "../../packages/ui/src/globals.css" + ], + "@comp/ui/editor.css": [ + "../../packages/ui/src/editor.css" + ], + "@comp/ui/tailwind.config": [ + "../../packages/ui/tailwind.config.ts" + ], + "@comp/kv": [ + "../../packages/kv/src/index.ts" + ], + "@comp/kv/*": [ + "../../packages/kv/src/*" + ], + "@comp/tsconfig": [ + "../../packages/tsconfig/index.ts" + ], + "@comp/tsconfig/*": [ + "../../packages/tsconfig/*" + ], + "@trycompai/email": [ + "../../packages/email/index.ts" + ], + "@trycompai/email/*": [ + "../../packages/email/*" + ] } }, - "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts", "trigger.config.ts"], - "exclude": ["node_modules", ".next"] + "include": [ + "next-env.d.ts", + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts", + "trigger.config.ts", + ".next/dev/types/**/*.ts" + ], + "exclude": [ + "node_modules", + ".next" + ] } diff --git a/apps/portal/src/app/api/download-agent/scripts.ts b/apps/portal/src/app/api/download-agent/scripts.ts index 11e3d7c6b..64e9dec2f 100644 --- a/apps/portal/src/app/api/download-agent/scripts.ts +++ b/apps/portal/src/app/api/download-agent/scripts.ts @@ -1,10 +1,8 @@ import { getPackageFilename, getReadmeContent, getScriptFilename } from './scripts/common'; import { generateMacScript } from './scripts/mac'; -import { generateWindowsScript } from './scripts/windows'; export { generateMacScript, - generateWindowsScript, getPackageFilename, getReadmeContent, getScriptFilename, diff --git a/apps/portal/src/app/api/download-agent/scripts/index.ts b/apps/portal/src/app/api/download-agent/scripts/index.ts index 613fb7274..78b6f6892 100644 --- a/apps/portal/src/app/api/download-agent/scripts/index.ts +++ b/apps/portal/src/app/api/download-agent/scripts/index.ts @@ -1,3 +1,2 @@ export { getPackageFilename, getReadmeContent, getScriptFilename } from './common'; export { generateMacScript } from './mac'; -export { generateWindowsScript } from './windows'; diff --git a/apps/portal/src/app/api/download-agent/scripts/windows.ts 
-import type { ScriptConfig } from '../types';
-
-export function generateWindowsScript(config: ScriptConfig): string {
-  const { orgId, employeeId, fleetDevicePath } = config;
-
-  const script = `@echo off
-title CompAI Device Setup
-setlocal EnableExtensions EnableDelayedExpansion
-color 0A
-
-REM =========================
-REM Variables
-REM =========================
-set "ORG_ID=${orgId}"
-set "EMPLOYEE_ID=${employeeId}"
-set "PRIMARY_DIR=${fleetDevicePath}"
-set "FALLBACK_DIR=C:\\Users\\Public\\CompAI\\Fleet"
-set "CHOSEN_DIR="
-set "LOG_FILE="
-set "HAS_ERROR=0"
-set "ERRORS="
-set "EXIT_CODE=0"
-REM newline token (exactly this 2-line shape)
-set "nl=^
-"
-
-REM --- bootstrap log (updated once CHOSEN_DIR is known) ---
-set "LOG_FILE=%~dp0setup.log"
-
-goto :main
-
-REM =======================================================
-REM Subroutines (placed AFTER main to avoid early execution)
-REM =======================================================
-:log_msg
-setlocal EnableDelayedExpansion
-set "msg=%~1"
-echo [%date% %time%] !msg!
->>"%LOG_FILE%" echo [%date% %time%] !msg!
-endlocal & exit /b 0
-
-:log_run
-setlocal EnableDelayedExpansion
-set "cmdline=%*"
-echo [%date% %time%] CMD: !cmdline!
->>"%LOG_FILE%" echo [%date% %time%] CMD: !cmdline!
-%*
-set "rc=!errorlevel!"
-if not "!rc!"=="0" (
-  echo [%date% %time%] ERR !rc!: !cmdline!
-  >>"%LOG_FILE%" echo [%date% %time%] ERR !rc!: !cmdline!
-)
-endlocal & set "LAST_RC=%rc%"
-exit /b %LAST_RC%
-
-REM =========================
-REM Main
-REM =========================
-:main
-call :log_msg "Script starting"
-
-REM Admin check
-whoami /groups | find "S-1-16-12288" >nul 2>&1
-if errorlevel 1 (
-  color 0E
-  echo This script must be run as Administrator.
-  echo Please right-click the file and select "Run as administrator".
-  echo.
-  echo Press any key to exit, then try again with Administrator privileges.
-  pause
-  exit /b 5
-)
-
-REM Relaunch persistent window
-if not "%PERSIST%"=="1" (
-  set "PERSIST=1"
-  call :log_msg "Re-launching in a persistent window"
-  start "CompAI Device Setup" cmd /k "%~f0 %*"
-  exit /b
-)
-
-call :log_msg "Running with administrator privileges"
-call :log_msg "Current directory: %cd%"
-call :log_msg "Script path: %~f0"
-call :log_msg "Switching working directory to script folder"
-cd /d "%~dp0"
-call :log_msg "New current directory: %cd%"
-echo.
-
-REM Choose writable directory
-call :log_msg "Choosing destination directory; primary=%PRIMARY_DIR% fallback=%FALLBACK_DIR%"
-if exist "%PRIMARY_DIR%\\*" set "CHOSEN_DIR=%PRIMARY_DIR%"
-if not defined CHOSEN_DIR call :log_run mkdir "%PRIMARY_DIR%"
-if not defined CHOSEN_DIR if exist "%PRIMARY_DIR%\\*" set "CHOSEN_DIR=%PRIMARY_DIR%"
-
-if not defined CHOSEN_DIR call :log_msg "Primary not available; trying fallback"
-if not defined CHOSEN_DIR if exist "%FALLBACK_DIR%\\*" set "CHOSEN_DIR=%FALLBACK_DIR%"
-if not defined CHOSEN_DIR call :log_run mkdir "%FALLBACK_DIR%"
-if not defined CHOSEN_DIR if exist "%FALLBACK_DIR%\\*" set "CHOSEN_DIR=%FALLBACK_DIR%"
-
-if not defined CHOSEN_DIR (
-  color 0E
-  call :log_msg "WARNING: No writable directory found"
-  echo Primary attempted: "%PRIMARY_DIR%"
-  echo Fallback attempted: "%FALLBACK_DIR%"
-  echo [%date% %time%] No writable directory found. Primary: %PRIMARY_DIR%, Fallback: %FALLBACK_DIR% >> "%~dp0setup.log"
-  set "LOG_FILE=%~dp0setup.log"
-  set "HAS_ERROR=1"
-  set "ERRORS=!ERRORS!- No writable directory found (Primary: %PRIMARY_DIR%, Fallback: %FALLBACK_DIR%).!nl!"
-  set "EXIT_CODE=1"
-) else (
-  set "MARKER_DIR=%CHOSEN_DIR%"
-  if not "!MARKER_DIR:~-1!"=="\\" set "MARKER_DIR=!MARKER_DIR!\\"
-
-  REM switch the log file to the chosen directory, carry over bootstrap logs
-  set "FINAL_LOG=!MARKER_DIR!setup.log"
-  if /i not "%LOG_FILE%"=="%FINAL_LOG%" (
-    call :log_msg "Switching log to !FINAL_LOG!"
-    if exist "%LOG_FILE%" type "%LOG_FILE%" >> "!FINAL_LOG!" & del "%LOG_FILE%"
-    set "LOG_FILE=!FINAL_LOG!"
-  )
-  call :log_msg "Using directory: !MARKER_DIR!"
-)
-echo Logs will be written to: !LOG_FILE!
-echo.
-
-REM Write marker files
-if defined CHOSEN_DIR (
-  call :log_msg "Writing organization marker file"
-  call :log_msg "Preparing to write org marker to !MARKER_DIR!!ORG_ID!"
-  call :log_run cmd /c "(echo %ORG_ID%) > \"!MARKER_DIR!!ORG_ID!\""
-  if errorlevel 1 (
-    color 0E
-    call :log_msg "WARNING: Failed writing organization marker file to !MARKER_DIR!"
-    echo [%date% %time%] Failed writing org marker file >> "%LOG_FILE%"
-    set "HAS_ERROR=1"
-    set "ERRORS=!ERRORS!- Failed writing organization marker file.!nl!"
-    set "EXIT_CODE=1"
-  ) else (
-    call :log_msg "[OK] Organization marker file: !MARKER_DIR!!ORG_ID!"
-  )
-
-  call :log_msg "Writing employee marker file"
-  call :log_msg "Preparing to write employee marker to !MARKER_DIR!!EMPLOYEE_ID!"
-  call :log_run cmd /c "(echo %EMPLOYEE_ID%) > \"!MARKER_DIR!!EMPLOYEE_ID!\""
-  if errorlevel 1 (
-    color 0E
-    call :log_msg "WARNING: Failed writing employee marker file to !MARKER_DIR!"
-    echo [%date% %time%] Failed writing employee marker file >> "%LOG_FILE%"
-    set "HAS_ERROR=1"
-    set "ERRORS=!ERRORS!- Failed writing employee marker file.!nl!"
-    set "EXIT_CODE=1"
-  ) else (
-    call :log_msg "[OK] Employee marker file: !MARKER_DIR!!EMPLOYEE_ID!"
-  )
-)
-
-REM Permissions
-if defined CHOSEN_DIR (
-  call :log_msg "Setting permissions on marker directory"
-  call :log_run icacls "!MARKER_DIR!" /inheritance:e
-
-  call :log_msg "Granting read to SYSTEM and Administrators on org marker"
-  call :log_run icacls "!MARKER_DIR!!ORG_ID!" /grant *S-1-5-18:R *S-1-5-32-544:R
-
-  call :log_msg "Granting read to SYSTEM and Administrators on employee marker"
-  call :log_run icacls "!MARKER_DIR!!EMPLOYEE_ID!" /grant *S-1-5-18:R *S-1-5-32-544:R
-)
-
-REM Verify
-echo.
-echo Verifying markers...
-if defined CHOSEN_DIR (
-  call :log_msg "Verifying marker exists: !MARKER_DIR!!EMPLOYEE_ID!"
-  if not exist "!MARKER_DIR!!EMPLOYEE_ID!" (
-    color 0E
-    call :log_msg "WARNING: Employee marker file missing at !MARKER_DIR!!EMPLOYEE_ID!"
-    echo [%date% %time%] Verification failed: employee marker file missing >> "!LOG_FILE!"
-    set "HAS_ERROR=1"
-    set "ERRORS=!ERRORS!- Employee marker file missing at !MARKER_DIR!!EMPLOYEE_ID!!.!nl!"
-    set "EXIT_CODE=2"
-  ) else (
-    call :log_msg "[OK] Employee marker file present: !MARKER_DIR!!EMPLOYEE_ID!"
-  )
-)
-rem Skipping registry checks per request
-
-REM Result / Exit
-echo.
-echo ------------------------------------------------------------
-if "%HAS_ERROR%"=="0" (
-  color 0A
-  echo RESULT: SUCCESS
-  echo Setup completed successfully for %EMPLOYEE_ID%.
-  if defined CHOSEN_DIR echo Files created in: !CHOSEN_DIR!
-  echo Log file: !LOG_FILE!
-  call :log_msg "RESULT: SUCCESS"
-) else (
-  color 0C
-  echo RESULT: COMPLETED WITH ISSUES
-  echo One or more steps did not complete successfully. Details:
-  echo.
-  echo !ERRORS!
-  echo.
-  echo Next steps:
-  echo - Take a screenshot of this window.
-  echo - Attach the log file from: !LOG_FILE!
-  echo - Share both with your CompAI support contact.
-  call :log_msg "RESULT: COMPLETED WITH ISSUES (exit=%EXIT_CODE%)"
-)
-echo ------------------------------------------------------------
-echo.
-echo Press any key to close this window. This will not affect installation.
-pause
-if "%HAS_ERROR%"=="0" (exit /b 0) else (exit /b %EXIT_CODE%)
-
-REM End of main
-goto :eof
-`;
-
-  return script.replace(/\n/g, '\r\n');
-}
diff --git a/bun.lock b/bun.lock
index 68ac8e758..1ef558a93 100644
--- a/bun.lock
+++ b/bun.lock
@@ -76,8 +76,8 @@
         "@nestjs/platform-express": "^11.1.5",
         "@nestjs/swagger": "^11.2.0",
         "@prisma/client": "^6.13.0",
+        "@react-email/components": "^0.0.41",
         "@trycompai/db": "^1.3.17",
-        "@trycompai/email": "workspace:*",
         "archiver": "^7.0.1",
         "axios": "^1.12.2",
         "better-auth": "^1.3.27",
@@ -89,6 +89,8 @@
         "nanoid": "^5.1.6",
         "pdf-lib": "^1.17.1",
         "prisma": "^6.13.0",
+        "react": "^19.1.1",
+        "react-dom": "^19.1.0",
         "reflect-metadata": "^0.2.2",
         "resend": "^6.4.2",
         "rxjs": "^7.8.1",
@@ -328,6 +330,21 @@
         "typescript": "^5.8.3",
       },
     },
+    "packages/db": {
+      "name": "@trycompai/db",
+      "version": "1.3.17",
+      "dependencies": {
+        "@prisma/client": "^6.13.0",
+        "dotenv": "^16.4.5",
+        "zod": "^4.1.12",
+      },
+      "devDependencies": {
+        "@types/node": "^24.2.0",
+        "prisma": "^6.13.0",
+        "ts-node": "^10.9.2",
+        "typescript": "^5.9.2",
+      },
+    },
     "packages/email": {
       "name": "@trycompai/email",
       "version": "1.0.0",
@@ -1966,7 +1983,7 @@
 
     "@trycompai/analytics": ["@trycompai/analytics@workspace:packages/analytics"],
 
-    "@trycompai/db": ["@trycompai/db@1.3.17", "", { "dependencies": { "@prisma/client": "^6.13.0", "dotenv": "^16.4.5" } }, "sha512-vrKf+/YGdQhpP470xWhysL3RDL8v16pS90AafF718YcRI6mI/XUqlirNMS43+XtOksrc5CHITyBLLOd848bFDA=="],
+    "@trycompai/db": ["@trycompai/db@workspace:packages/db"],
 
     "@trycompai/email": ["@trycompai/email@workspace:packages/email"],
 
@@ -4620,7 +4637,7 @@
 
     "saxes": ["saxes@6.0.0", "", { "dependencies": { "xmlchars": "^2.2.0" } }, "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA=="],
 
-    "scheduler": ["scheduler@0.25.0", "", {}, "sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA=="],
+    "scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="],
 
     "schema-utils": ["schema-utils@3.3.0", "", { "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } }, "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg=="],
 
@@ -5560,6 +5577,8 @@
 
     "@react-email/components/@react-email/render": ["@react-email/render@1.1.2", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3", "react-promise-suspense": "^0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-RnRehYN3v9gVlNMehHPHhyp2RQo7+pSkHDtXPvg3s0GbzM9SQMW4Qrf8GRNvtpLC4gsI+Wt0VatNRUFqjvevbw=="],
 
+    "@react-three/fiber/scheduler": ["scheduler@0.25.0", "", {}, "sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA=="],
+
     "@react-three/postprocessing/maath": ["maath@0.6.0", "", { "peerDependencies": { "@types/three": ">=0.144.0", "three": ">=0.144.0" } }, "sha512-dSb2xQuP7vDnaYqfoKzlApeRcR2xtN8/f7WV/TMAkBC8552TwTLtOO0JTcSygkYMjNDPoo6V01jTw/aPi4JrMw=="],
"peerDependencies": { "@types/three": ">=0.144.0", "three": ">=0.144.0" } }, "sha512-dSb2xQuP7vDnaYqfoKzlApeRcR2xtN8/f7WV/TMAkBC8552TwTLtOO0JTcSygkYMjNDPoo6V01jTw/aPi4JrMw=="], "@semantic-release/git/execa": ["execa@5.1.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" } }, "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg=="], @@ -5624,7 +5643,9 @@ "@trigger.dev/sdk/uuid": ["uuid@9.0.1", "", { "bin": { "uuid": "dist/bin/uuid" } }, "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA=="], - "@trycompai/db/dotenv": ["dotenv@16.4.7", "", {}, "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ=="], + "@trycompai/db/dotenv": ["dotenv@16.6.1", "", {}, "sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow=="], + + "@trycompai/db/zod": ["zod@4.1.12", "", {}, "sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ=="], "@trycompai/email/resend": ["resend@4.8.0", "", { "dependencies": { "@react-email/render": "1.1.2" } }, "sha512-R8eBOFQDO6dzRTDmaMEdpqrkmgSjPpVXt4nGfWsZdYOet0kqra0xgbvTES6HmCriZEXbmGk3e0DiGIaLFTFSHA=="], @@ -6308,12 +6329,12 @@ "rc/strip-json-comments": ["strip-json-comments@2.0.1", "", {}, "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="], - "react-dom/scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="], - "react-dropzone/file-selector": ["file-selector@2.1.2", "", { "dependencies": { "tslib": "^2.7.0" } }, "sha512-QgXo+mXTe8ljeqUFaX3QVHc5osSItJ/Km+xpocx0aSqWGMSCf6qYs/VnzZgS864Pjn5iceMRFigeAV7AfTlaig=="], "react-promise-suspense/fast-deep-equal": ["fast-deep-equal@2.0.1", "", {}, "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w=="], + "react-reconciler/scheduler": ["scheduler@0.25.0", "", {}, "sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA=="], + "read-cache/pify": ["pify@2.3.0", "", {}, "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog=="], "read-yaml-file/js-yaml": ["js-yaml@4.1.1", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA=="], diff --git a/package.json b/package.json index 23973f52a..8496c16f0 100644 --- a/package.json +++ b/package.json @@ -84,13 +84,7 @@ }, "workspaces": [ "apps/*", - "packages/analytics", - "packages/email", - "packages/integrations", - "packages/kv", - "packages/tsconfig", - "packages/ui", - "packages/utils" + "packages/*" ], "dependencies": { "@types/cheerio": "^1.0.0", @@ -102,4 +96,4 @@ "xlsx": "^0.18.5", "zod": "^3.25.76" } -} \ No newline at end of file +} diff --git a/packages/email/index.ts b/packages/email/index.ts index 91ddd7747..34515beef 100644 --- a/packages/email/index.ts +++ b/packages/email/index.ts @@ -1,12 +1,9 @@ // Email templates -export * from './emails/access-granted'; -export * from './emails/access-reclaim'; export * from './emails/all-policy-notification'; export * from './emails/invite'; export * from 
'./emails/invite-portal'; export * from './emails/magic-link'; export * from './emails/marketing/welcome'; -export * from './emails/nda-signing'; export * from './emails/otp'; export * from './emails/policy-notification'; export * from './emails/waitlist'; diff --git a/turbo.json b/turbo.json index 7eb312055..e1cfe7ada 100644 --- a/turbo.json +++ b/turbo.json @@ -1,90 +1,25 @@ { "$schema": "https://turborepo.org/schema.json", - "globalDependencies": [ - "**/.env" - ], + "globalDependencies": ["**/.env"], "ui": "stream", "tasks": { "prisma:generate": { "cache": false, - "outputs": [ - "prisma/schema.prisma", - "node_modules/.prisma/**" - ] + "outputs": ["prisma/schema.prisma", "node_modules/.prisma/**"] }, "build": { - "dependsOn": [ - "^build", - "prisma:generate" - ], - "env": [ - "AUTH_GOOGLE_ID", - "AUTH_GOOGLE_SECRET", - "AUTH_GITHUB_ID", - "AUTH_GITHUB_SECRET", - "AUTH_SECRET", - "DATABASE_URL", - "OPENAI_API_KEY", - "RESEND_API_KEY", - "UPSTASH_REDIS_REST_URL", - "UPSTASH_REDIS_REST_TOKEN", - "TRIGGER_SECRET_KEY", - "TRIGGER_API_KEY", - "TRIGGER_API_URL", - "REVALIDATION_SECRET", - "VERCEL_ACCESS_TOKEN", - "VERCEL_TEAM_ID", - "VERCEL_PROJECT_ID", - "TRUST_PORTAL_PROJECT_ID", - "NODE_ENV", - "APP_AWS_ACCESS_KEY_ID", - "APP_AWS_SECRET_ACCESS_KEY", - "APP_AWS_REGION", - "APP_AWS_BUCKET_NAME", - "NEXT_PUBLIC_PORTAL_URL", - "FIRECRAWL_API_KEY", - "FLEET_URL", - "FLEET_TOKEN", - "DUB_API_KEY", - "DUB_REFER_URL", - "GA4_API_SECRET", - "GA4_MEASUREMENT_ID", - "LINKEDIN_CONVERSIONS_ACCESS_TOKEN", - "NEXT_PUBLIC_POSTHOG_KEY", - "NEXT_PUBLIC_POSTHOG_HOST", - "NEXT_PUBLIC_IS_DUB_ENABLED", - "NEXT_PUBLIC_GTM_ID", - "NEXT_PUBLIC_LINKEDIN_PARTNER_ID", - "NEXT_PUBLIC_LINKEDIN_CONVERSION_ID", - "NEXT_PUBLIC_GOOGLE_ADS_CONVERSION_LABEL", - "NEXT_PUBLIC_API_URL", - "NEXT_PUBLIC_BETTER_AUTH_URL" - ], - "inputs": [ - "$TURBO_DEFAULT$", - ".env" - ], - "outputs": [ - ".next/**", - "!.next/cache/**", - "next-env.d.ts" - ] + "dependsOn": ["^build", "prisma:generate"], + "inputs": ["$TURBO_DEFAULT$", ".env*"], + "outputs": [".next/**", "!.next/cache/**", "next-env.d.ts", "dist/**"] }, "lint": { - "dependsOn": [ - "^lint" - ] + "dependsOn": ["^lint"] }, "typecheck": { - "outputs": [ - "node_modules/.cache/tsbuildinfo.json" - ] + "outputs": ["node_modules/.cache/tsbuildinfo.json"] }, "dev": { - "inputs": [ - "$TURBO_DEFAULT$", - ".env" - ], + "inputs": ["$TURBO_DEFAULT$", ".env"], "persistent": true, "cache": false }, @@ -96,13 +31,8 @@ }, "test": { "cache": false, - "outputs": [ - "coverage/**" - ], - "inputs": [ - "$TURBO_DEFAULT$", - ".env" - ] + "outputs": ["coverage/**"], + "inputs": ["$TURBO_DEFAULT$", ".env"] } } }