diff --git a/apps/api/.env.example b/apps/api/.env.example index a3288c9a2..290ce034a 100644 --- a/apps/api/.env.example +++ b/apps/api/.env.example @@ -9,6 +9,7 @@ APP_AWS_REGION= APP_AWS_ACCESS_KEY_ID= APP_AWS_SECRET_ACCESS_KEY= APP_AWS_ORG_ASSETS_BUCKET= +APP_AWS_ENDPOINT="" # optional for using services like MinIO DATABASE_URL= diff --git a/apps/api/package.json b/apps/api/package.json index 945051e49..79e11f9d1 100644 --- a/apps/api/package.json +++ b/apps/api/package.json @@ -26,7 +26,7 @@ "@react-email/components": "^0.0.41", "@trigger.dev/build": "4.0.6", "@trigger.dev/sdk": "4.0.6", - "@trycompai/db": "^1.3.20", + "@trycompai/db": "1.3.21", "@trycompai/email": "workspace:*", "@upstash/redis": "^1.34.2", "@upstash/vector": "^1.2.2", diff --git a/apps/api/src/app/s3.ts b/apps/api/src/app/s3.ts index ea96b519e..c20f3facc 100644 --- a/apps/api/src/app/s3.ts +++ b/apps/api/src/app/s3.ts @@ -11,6 +11,7 @@ const logger = new Logger('S3'); const APP_AWS_REGION = process.env.APP_AWS_REGION; const APP_AWS_ACCESS_KEY_ID = process.env.APP_AWS_ACCESS_KEY_ID; const APP_AWS_SECRET_ACCESS_KEY = process.env.APP_AWS_SECRET_ACCESS_KEY; +const APP_AWS_ENDPOINT = process.env.APP_AWS_ENDPOINT; export const BUCKET_NAME = process.env.APP_AWS_BUCKET_NAME; export const APP_AWS_QUESTIONNAIRE_UPLOAD_BUCKET = @@ -37,11 +38,13 @@ try { } s3ClientInstance = new S3Client({ + endpoint: APP_AWS_ENDPOINT || undefined, region: APP_AWS_REGION, credentials: { accessKeyId: APP_AWS_ACCESS_KEY_ID, secretAccessKey: APP_AWS_SECRET_ACCESS_KEY, }, + forcePathStyle: !!APP_AWS_ENDPOINT, }); } catch (error) { logger.error( diff --git a/apps/api/src/assistant-chat/assistant-chat.controller.ts b/apps/api/src/assistant-chat/assistant-chat.controller.ts index f5a861ef7..dbd58220a 100644 --- a/apps/api/src/assistant-chat/assistant-chat.controller.ts +++ b/apps/api/src/assistant-chat/assistant-chat.controller.ts @@ -34,7 +34,10 @@ import type { AssistantChatMessage } from './assistant-chat.types'; export 
class AssistantChatController { constructor(private readonly assistantChatService: AssistantChatService) {} - private getUserScopedContext(auth: AuthContextType): { organizationId: string; userId: string } { + private getUserScopedContext(auth: AuthContextType): { + organizationId: string; + userId: string; + } { // Defensive checks (should already be guaranteed by HybridAuthGuard + AuthContext decorator) if (!auth.organizationId) { throw new BadRequestException('Organization ID is required'); @@ -69,7 +72,9 @@ export class AssistantChatController { }, }, }) - async getHistory(@AuthContext() auth: AuthContextType): Promise<{ messages: AssistantChatMessage[] }> { + async getHistory( + @AuthContext() auth: AuthContextType, + ): Promise<{ messages: AssistantChatMessage[] }> { const { organizationId, userId } = this.getUserScopedContext(auth); const messages = await this.assistantChatService.getHistory({ @@ -105,7 +110,9 @@ export class AssistantChatController { summary: 'Clear assistant chat history', description: 'Deletes the current user-scoped assistant chat history.', }) - async clearHistory(@AuthContext() auth: AuthContextType): Promise<{ success: true }> { + async clearHistory( + @AuthContext() auth: AuthContextType, + ): Promise<{ success: true }> { const { organizationId, userId } = this.getUserScopedContext(auth); await this.assistantChatService.clearHistory({ @@ -116,5 +123,3 @@ export class AssistantChatController { return { success: true }; } } - - diff --git a/apps/api/src/assistant-chat/assistant-chat.dto.ts b/apps/api/src/assistant-chat/assistant-chat.dto.ts index 9568fbec7..8364161e5 100644 --- a/apps/api/src/assistant-chat/assistant-chat.dto.ts +++ b/apps/api/src/assistant-chat/assistant-chat.dto.ts @@ -1,5 +1,11 @@ import { ApiProperty } from '@nestjs/swagger'; -import { IsArray, IsIn, IsNumber, IsString, ValidateNested } from 'class-validator'; +import { + IsArray, + IsIn, + IsNumber, + IsString, + ValidateNested, +} from 'class-validator'; import { 
Type } from 'class-transformer'; export class AssistantChatMessageDto { @@ -27,5 +33,3 @@ export class SaveAssistantChatHistoryDto { @Type(() => AssistantChatMessageDto) messages!: AssistantChatMessageDto[]; } - - diff --git a/apps/api/src/assistant-chat/assistant-chat.module.ts b/apps/api/src/assistant-chat/assistant-chat.module.ts index ba781368c..a06068c0c 100644 --- a/apps/api/src/assistant-chat/assistant-chat.module.ts +++ b/apps/api/src/assistant-chat/assistant-chat.module.ts @@ -9,5 +9,3 @@ import { AssistantChatService } from './assistant-chat.service'; providers: [AssistantChatService], }) export class AssistantChatModule {} - - diff --git a/apps/api/src/assistant-chat/assistant-chat.service.ts b/apps/api/src/assistant-chat/assistant-chat.service.ts index 52672cef3..dfd2bd423 100644 --- a/apps/api/src/assistant-chat/assistant-chat.service.ts +++ b/apps/api/src/assistant-chat/assistant-chat.service.ts @@ -17,7 +17,10 @@ type GetAssistantChatKeyParams = { userId: string; }; -const getAssistantChatKey = ({ organizationId, userId }: GetAssistantChatKeyParams): string => { +const getAssistantChatKey = ({ + organizationId, + userId, +}: GetAssistantChatKeyParams): string => { return `assistant-chat:v1:${organizationId}:${userId}`; }; @@ -27,9 +30,13 @@ export class AssistantChatService { * Default TTL is 7 days. This is intended to behave like "session context" * rather than a long-term, searchable archive. */ - private readonly ttlSeconds = Number(process.env.ASSISTANT_CHAT_TTL_SECONDS ?? 60 * 60 * 24 * 7); + private readonly ttlSeconds = Number( + process.env.ASSISTANT_CHAT_TTL_SECONDS ?? 
60 * 60 * 24 * 7, + ); - async getHistory(params: GetAssistantChatKeyParams): Promise { + async getHistory( + params: GetAssistantChatKeyParams, + ): Promise { const key = getAssistantChatKey(params); const raw = await assistantChatRedisClient.get(key); const parsed = StoredMessagesSchema.safeParse(raw); @@ -37,7 +44,10 @@ export class AssistantChatService { return parsed.data; } - async saveHistory(params: GetAssistantChatKeyParams, messages: AssistantChatMessage[]): Promise { + async saveHistory( + params: GetAssistantChatKeyParams, + messages: AssistantChatMessage[], + ): Promise { const key = getAssistantChatKey(params); // Always validate before writing to keep the cache shape stable. const validated = StoredMessagesSchema.parse(messages); @@ -49,5 +59,3 @@ export class AssistantChatService { await assistantChatRedisClient.del(key); } } - - diff --git a/apps/api/src/assistant-chat/assistant-chat.types.ts b/apps/api/src/assistant-chat/assistant-chat.types.ts index 223a5316e..e6cdba235 100644 --- a/apps/api/src/assistant-chat/assistant-chat.types.ts +++ b/apps/api/src/assistant-chat/assistant-chat.types.ts @@ -4,5 +4,3 @@ export type AssistantChatMessage = { text: string; createdAt: number; }; - - diff --git a/apps/api/src/assistant-chat/upstash-redis.client.ts b/apps/api/src/assistant-chat/upstash-redis.client.ts index da7d93439..d4f74ef8d 100644 --- a/apps/api/src/assistant-chat/upstash-redis.client.ts +++ b/apps/api/src/assistant-chat/upstash-redis.client.ts @@ -19,7 +19,11 @@ class InMemoryRedis { return record.value as T; } - async set(key: string, value: unknown, options?: { ex?: number }): Promise<'OK'> { + async set( + key: string, + value: unknown, + options?: { ex?: number }, + ): Promise<'OK'> { const expiresAt = options?.ex ? 
Date.now() + options.ex * 1000 : undefined; this.storage.set(key, { value, expiresAt }); return 'OK'; @@ -32,7 +36,8 @@ class InMemoryRedis { } const hasUpstashConfig = - !!process.env.UPSTASH_REDIS_REST_URL && !!process.env.UPSTASH_REDIS_REST_TOKEN; + !!process.env.UPSTASH_REDIS_REST_URL && + !!process.env.UPSTASH_REDIS_REST_TOKEN; export const assistantChatRedisClient: Pick = hasUpstashConfig @@ -41,5 +46,3 @@ export const assistantChatRedisClient: Pick = token: process.env.UPSTASH_REDIS_REST_TOKEN!, }) : (new InMemoryRedis() as unknown as Pick); - - diff --git a/apps/api/src/attachments/attachments.service.ts b/apps/api/src/attachments/attachments.service.ts index 46561a2d7..67ef446e7 100644 --- a/apps/api/src/attachments/attachments.service.ts +++ b/apps/api/src/attachments/attachments.service.ts @@ -14,6 +14,7 @@ import { import { randomBytes } from 'crypto'; import { AttachmentResponseDto } from '../tasks/dto/task-responses.dto'; import { UploadAttachmentDto } from './upload-attachment.dto'; +import { s3Client } from '@/app/s3'; @Injectable() export class AttachmentsService { @@ -27,20 +28,14 @@ export class AttachmentsService { // Safe to access environment variables directly since they're validated this.bucketName = process.env.APP_AWS_BUCKET_NAME!; - if ( - !process.env.APP_AWS_ACCESS_KEY_ID || - !process.env.APP_AWS_SECRET_ACCESS_KEY - ) { - console.warn('AWS credentials are missing, S3 client may fail'); + if (!s3Client) { + console.error('S3 Client is not initialized. Check AWS S3 configuration.'); + throw new Error( + 'S3 Client is not initialized. 
Check AWS S3 configuration.', + ); } - this.s3Client = new S3Client({ - region: process.env.APP_AWS_REGION || 'us-east-1', - credentials: { - accessKeyId: process.env.APP_AWS_ACCESS_KEY_ID!, - secretAccessKey: process.env.APP_AWS_SECRET_ACCESS_KEY!, - }, - }); + this.s3Client = s3Client; } /** @@ -385,6 +380,25 @@ export class AttachmentsService { return this.generateSignedUrl(s3Key); } + /** + * Generate presigned download URL with a custom download filename + */ + async getPresignedDownloadUrlWithFilename( + s3Key: string, + downloadFilename: string, + ): Promise { + const sanitizedFilename = this.sanitizeHeaderValue(downloadFilename); + const getCommand = new GetObjectCommand({ + Bucket: this.bucketName, + Key: s3Key, + ResponseContentDisposition: `attachment; filename="${sanitizedFilename}"`, + }); + + return getSignedUrl(this.s3Client, getCommand, { + expiresIn: this.SIGNED_URL_EXPIRY, + }); + } + async getObjectBuffer(s3Key: string): Promise { const getCommand = new GetObjectCommand({ Bucket: this.bucketName, diff --git a/apps/api/src/auth/internal-token.guard.ts b/apps/api/src/auth/internal-token.guard.ts index d0e6ec5e7..a753b0015 100644 --- a/apps/api/src/auth/internal-token.guard.ts +++ b/apps/api/src/auth/internal-token.guard.ts @@ -42,5 +42,3 @@ export class InternalTokenGuard implements CanActivate { return true; } } - - diff --git a/apps/api/src/comments/comment-mention-notifier.service.ts b/apps/api/src/comments/comment-mention-notifier.service.ts index bfcdecbbb..816a86664 100644 --- a/apps/api/src/comments/comment-mention-notifier.service.ts +++ b/apps/api/src/comments/comment-mention-notifier.service.ts @@ -102,7 +102,8 @@ async function buildFallbackCommentContext(params: { }); if (taskItem) { - const parentRoutePath = taskItem.entityType === 'vendor' ? 'vendors' : 'risk'; + const parentRoutePath = + taskItem.entityType === 'vendor' ? 
'vendors' : 'risk'; const url = new URL( `${appUrl}/${organizationId}/${parentRoutePath}/${taskItem.entityId}`, ); @@ -291,7 +292,11 @@ export class CommentMentionNotifierService { // Check if user is unsubscribed from comment mention notifications // Note: We'll use 'taskMentions' preference for now, or create a new 'commentMentions' preference - const isUnsubscribed = await isUserUnsubscribed(db, user.email, 'taskMentions'); + const isUnsubscribed = await isUserUnsubscribed( + db, + user.email, + 'taskMentions', + ); if (isUnsubscribed) { this.logger.log( `Skipping mention notification: user ${user.email} is unsubscribed from mentions`, @@ -375,4 +380,3 @@ export class CommentMentionNotifierService { } } } - diff --git a/apps/api/src/comments/comments.service.ts b/apps/api/src/comments/comments.service.ts index d78b66cd5..9aea2b8e7 100644 --- a/apps/api/src/comments/comments.service.ts +++ b/apps/api/src/comments/comments.service.ts @@ -270,7 +270,9 @@ export class CommentsService { // Notify mentioned users if (createCommentDto.content && userId) { - const mentionedUserIds = extractMentionedUserIds(createCommentDto.content); + const mentionedUserIds = extractMentionedUserIds( + createCommentDto.content, + ); if (mentionedUserIds.length > 0) { // Fire-and-forget: notification failures should not block comment creation void this.mentionNotifier.notifyMentionedUsers({ diff --git a/apps/api/src/config/aws.config.ts b/apps/api/src/config/aws.config.ts index 40788e08c..d18cc304a 100644 --- a/apps/api/src/config/aws.config.ts +++ b/apps/api/src/config/aws.config.ts @@ -6,6 +6,7 @@ const awsConfigSchema = z.object({ accessKeyId: z.string().min(1, 'AWS_ACCESS_KEY_ID is required'), secretAccessKey: z.string().min(1, 'AWS_SECRET_ACCESS_KEY is required'), bucketName: z.string().min(1, 'AWS_BUCKET_NAME is required'), + endpoint: z.string().optional(), }); export type AwsConfig = z.infer; @@ -16,6 +17,7 @@ export const awsConfig = registerAs('aws', (): AwsConfig => { 
accessKeyId: process.env.APP_AWS_ACCESS_KEY_ID || '', secretAccessKey: process.env.APP_AWS_SECRET_ACCESS_KEY || '', bucketName: process.env.APP_AWS_BUCKET_NAME || '', + endpoint: process.env.APP_AWS_ENDPOINT || '', }; // Validate configuration at startup diff --git a/apps/api/src/policies/policies.controller.ts b/apps/api/src/policies/policies.controller.ts index 608c0c541..1020be850 100644 --- a/apps/api/src/policies/policies.controller.ts +++ b/apps/api/src/policies/policies.controller.ts @@ -3,6 +3,7 @@ import { Controller, Delete, Get, + HttpCode, Param, Patch, Post, @@ -77,6 +78,40 @@ export class PoliciesController { }; } + @Get('download-all') + @HttpCode(HttpStatus.OK) + @ApiOperation({ + summary: 'Download all published policies as a single PDF', + description: + 'Generates a PDF bundle containing all published policies with organization branding and returns a signed download URL', + }) + @ApiResponse({ + status: 200, + description: 'Signed URL for PDF bundle returned', + }) + @ApiResponse({ + status: 404, + description: 'No published policies found', + }) + async downloadAllPolicies( + @OrganizationId() organizationId: string, + @AuthContext() authContext: AuthContextType, + ) { + const result = + await this.policiesService.downloadAllPoliciesPdf(organizationId); + + return { + ...result, + authType: authContext.authType, + ...(authContext.userId && { + authenticatedUser: { + id: authContext.userId, + email: authContext.userEmail, + }, + }), + }; + } + @Get(':id') @ApiOperation(POLICY_OPERATIONS.getPolicyById) @ApiParam(POLICY_PARAMS.policyId) diff --git a/apps/api/src/policies/policies.module.ts b/apps/api/src/policies/policies.module.ts index ab2cc2e19..e4e742c3b 100644 --- a/apps/api/src/policies/policies.module.ts +++ b/apps/api/src/policies/policies.module.ts @@ -1,12 +1,14 @@ import { Module } from '@nestjs/common'; +import { AttachmentsModule } from '../attachments/attachments.module'; import { AuthModule } from '../auth/auth.module'; +import { 
PolicyPdfRendererService } from '../trust-portal/policy-pdf-renderer.service'; import { PoliciesController } from './policies.controller'; import { PoliciesService } from './policies.service'; @Module({ - imports: [AuthModule], + imports: [AuthModule, AttachmentsModule], controllers: [PoliciesController], - providers: [PoliciesService], + providers: [PoliciesService, PolicyPdfRendererService], exports: [PoliciesService], }) export class PoliciesModule {} diff --git a/apps/api/src/policies/policies.service.ts b/apps/api/src/policies/policies.service.ts index 60875cdd8..1227074cf 100644 --- a/apps/api/src/policies/policies.service.ts +++ b/apps/api/src/policies/policies.service.ts @@ -1,6 +1,9 @@ import { Injectable, NotFoundException, Logger } from '@nestjs/common'; import { db } from '@trycompai/db'; import type { Prisma } from '@trycompai/db'; +import { PDFDocument, rgb, StandardFonts } from 'pdf-lib'; +import { AttachmentsService } from '../attachments/attachments.service'; +import { PolicyPdfRendererService } from '../trust-portal/policy-pdf-renderer.service'; import type { CreatePolicyDto } from './dto/create-policy.dto'; import type { UpdatePolicyDto } from './dto/update-policy.dto'; @@ -8,6 +11,11 @@ import type { UpdatePolicyDto } from './dto/update-policy.dto'; export class PoliciesService { private readonly logger = new Logger(PoliciesService.name); + constructor( + private readonly attachmentsService: AttachmentsService, + private readonly pdfRendererService: PolicyPdfRendererService, + ) {} + async findAll(organizationId: string) { try { const policies = await db.policy.findMany({ @@ -247,4 +255,307 @@ export class PoliciesService { throw error; } } + + /** + * Convert hex color to RGB values (0-1 range for pdf-lib) + */ + private hexToRgb(hex: string): { r: number; g: number; b: number } { + const cleanHex = hex.replace('#', ''); + const r = parseInt(cleanHex.substring(0, 2), 16) / 255; + const g = parseInt(cleanHex.substring(2, 4), 16) / 255; + const b 
= parseInt(cleanHex.substring(4, 6), 16) / 255; + return { r, g, b }; + } + + /** + * Get accent color from organization or use default + */ + private getAccentColor(primaryColor: string | null | undefined): { + r: number; + g: number; + b: number; + } { + // Default project primary color: dark teal/green (#004D3D) + const defaultColor = { r: 0, g: 0.302, b: 0.239 }; + + if (!primaryColor) { + return defaultColor; + } + + const color = this.hexToRgb(primaryColor); + + if ( + Number.isNaN(color.r) || + Number.isNaN(color.g) || + Number.isNaN(color.b) + ) { + this.logger.warn( + `Invalid primary color format, using default: ${primaryColor}`, + ); + return defaultColor; + } + + return color; + } + + /** + * Download all published policies as a single PDF bundle (no watermark) + */ + async downloadAllPoliciesPdf(organizationId: string) { + // Get organization info + const organization = await db.organization.findUnique({ + where: { id: organizationId }, + select: { name: true, primaryColor: true }, + }); + + if (!organization) { + throw new NotFoundException('Organization not found'); + } + + // Get all published policies + const policies = await db.policy.findMany({ + where: { + organizationId, + status: 'published', + isArchived: false, + }, + select: { + id: true, + name: true, + content: true, + pdfUrl: true, + }, + orderBy: [{ lastPublishedAt: 'desc' }, { updatedAt: 'desc' }], + }); + + if (policies.length === 0) { + throw new NotFoundException('No published policies available'); + } + + const mergedPdf = await PDFDocument.create(); + const organizationName = organization.name || 'Organization'; + const accentColor = this.getAccentColor(organization.primaryColor); + + // Embed fonts once before the loop (expensive operation) + const helveticaBold = await mergedPdf.embedFont( + StandardFonts.HelveticaBold, + ); + const helvetica = await mergedPdf.embedFont(StandardFonts.Helvetica); + + // Step 1: Fetch/render all PDFs in parallel (expensive I/O operations) + type 
PreparedPolicy = { + policy: (typeof policies)[0]; + pdfBuffer: Buffer; + isUploaded: boolean; + }; + + const preparePolicy = async ( + policy: (typeof policies)[0], + ): Promise => { + const hasUploadedPdf = policy.pdfUrl && policy.pdfUrl.trim() !== ''; + + if (hasUploadedPdf) { + try { + const pdfBuffer = await this.attachmentsService.getObjectBuffer( + policy.pdfUrl!, + ); + return { + policy, + pdfBuffer: Buffer.from(pdfBuffer), + isUploaded: true, + }; + } catch (error) { + this.logger.warn( + `Failed to fetch uploaded PDF for policy ${policy.id}, falling back to content rendering`, + error, + ); + } + } + + // Render from content (either no pdfUrl or fetch failed) + const renderedBuffer = this.pdfRendererService.renderPoliciesPdfBuffer( + [{ name: policy.name, content: policy.content }], + undefined, // We'll add org header during merge + organization.primaryColor, + policies.length, + ); + return { policy, pdfBuffer: renderedBuffer, isUploaded: false }; + }; + + const preparedPolicies = await Promise.all(policies.map(preparePolicy)); + + // Step 2: Merge PDFs sequentially (must be sequential for PDFDocument operations) + // Helper to add content-rendered policy to merged PDF + const addContentRenderedPolicy = async ( + policy: (typeof policies)[0], + addOrgHeader: boolean, + ) => { + const renderedBuffer = this.pdfRendererService.renderPoliciesPdfBuffer( + [{ name: policy.name, content: policy.content }], + addOrgHeader ? 
organizationName : undefined, + organization.primaryColor, + policies.length, + ); + const renderedPdf = await PDFDocument.load(renderedBuffer); + const copiedPages = await mergedPdf.copyPages( + renderedPdf, + renderedPdf.getPageIndices(), + ); + for (const page of copiedPages) { + mergedPdf.addPage(page); + } + }; + + let isFirst = true; + for (const { policy, pdfBuffer, isUploaded } of preparedPolicies) { + if (isUploaded) { + try { + const uploadedPdf = await PDFDocument.load(pdfBuffer, { + ignoreEncryption: true, + }); + + // Rebuild the FIRST page: embed original page into a taller page + const originalFirstPage = uploadedPdf.getPage(0); + const { width, height } = originalFirstPage.getSize(); + + const headerHeight = isFirst ? 120 : 60; + const embeddedFirstPage = await mergedPdf.embedPage(originalFirstPage); + const rebuiltFirstPage = mergedPdf.addPage([ + width, + height + headerHeight, + ]); + + rebuiltFirstPage.drawPage(embeddedFirstPage, { + x: 0, + y: 0, + width, + height, + }); + + let yPos = height + headerHeight - 25; + + if (isFirst) { + rebuiltFirstPage.drawLine({ + start: { x: 20, y: yPos + 8 }, + end: { x: width - 20, y: yPos + 8 }, + thickness: 2, + color: rgb(accentColor.r, accentColor.g, accentColor.b), + }); + + rebuiltFirstPage.drawText(`${organizationName} - All Policies`, { + x: 20, + y: yPos - 14, + size: 14, + font: helveticaBold, + color: rgb(0, 0, 0), + }); + + const generatedDate = new Date().toLocaleDateString('en-US', { + year: 'numeric', + month: 'short', + day: 'numeric', + }); + + rebuiltFirstPage.drawText( + `Generated: ${generatedDate} | Total: ${policies.length} policies`, + { + x: width - 180, + y: yPos - 14, + size: 8, + font: helvetica, + color: rgb(0.5, 0.5, 0.5), + }, + ); + + yPos -= 34; + isFirst = false; + } + + rebuiltFirstPage.drawRectangle({ + x: 55, + y: yPos - 40, + width: 10, + height: 26, + color: rgb(accentColor.r, accentColor.g, accentColor.b), + }); + + rebuiltFirstPage.drawText(`POLICY: ${policy.name}`, { + 
x: 75, + y: yPos - 34, + size: 16, + font: helveticaBold, + color: rgb(0.12, 0.16, 0.23), + }); + + // Remaining pages unchanged (page 2..n) + if (uploadedPdf.getPageCount() > 1) { + const copiedRemainingPages = await mergedPdf.copyPages( + uploadedPdf, + uploadedPdf.getPageIndices().slice(1), + ); + for (const page of copiedRemainingPages) { + mergedPdf.addPage(page); + } + } + } catch (error) { + // PDF is corrupted/malformed, fall back to content rendering + this.logger.warn( + `Failed to parse uploaded PDF for policy ${policy.id}, falling back to content rendering`, + error, + ); + await addContentRenderedPolicy(policy, isFirst); + isFirst = false; + } + } else { + // Content was already rendered, but re-render if first (needs org header) + await addContentRenderedPolicy(policy, isFirst); + isFirst = false; + } + } + + // Add page numbers to all pages in the merged PDF + const pages = mergedPdf.getPages(); + const totalPages = pages.length; + // helvetica font already embedded above + + for (let i = 0; i < totalPages; i++) { + const page = pages[i]; + const { width } = page.getSize(); + const pageNumber = i + 1; + + page.drawText(`Page ${pageNumber} of ${totalPages}`, { + x: width / 2 - 30, + y: 15, + size: 8, + font: helvetica, + color: rgb(0.5, 0.5, 0.5), + }); + } + + const pdfBuffer = Buffer.from(await mergedPdf.save()); + + // Upload to S3 (no watermarking for internal use) + const timestamp = Date.now(); + const key = await this.attachmentsService.uploadToS3( + pdfBuffer, + `policies-bundle-${organizationId}-${timestamp}.pdf`, + 'application/pdf', + organizationId, + 'policy_downloads', + organizationId, + ); + + const downloadUrl = + await this.attachmentsService.getPresignedDownloadUrl(key); + + this.logger.log( + `Generated PDF bundle for organization ${organizationId} with ${policies.length} policies`, + ); + + return { + name: `${organizationName} - All Policies`, + downloadUrl, + policyCount: policies.length, + }; + } } diff --git 
a/apps/api/src/task-management/task-item-assignment-notifier.service.ts b/apps/api/src/task-management/task-item-assignment-notifier.service.ts index 5abf22dab..a9a44492a 100644 --- a/apps/api/src/task-management/task-item-assignment-notifier.service.ts +++ b/apps/api/src/task-management/task-item-assignment-notifier.service.ts @@ -183,9 +183,9 @@ export class TaskItemAssignmentNotifierService { }, }); - this.logger.log( - `[NOVU] Assignment in-app notification sent to ${assigneeUser.id} for task "${taskTitle}"`, - ); + this.logger.log( + `[NOVU] Assignment in-app notification sent to ${assigneeUser.id} for task "${taskTitle}"`, + ); } catch (error) { this.logger.error( `[NOVU] Failed to send assignment in-app notification to ${assigneeUser.id}:`, diff --git a/apps/api/src/task-management/task-item-mention-notifier.service.ts b/apps/api/src/task-management/task-item-mention-notifier.service.ts index a9c1eeffa..754fde974 100644 --- a/apps/api/src/task-management/task-item-mention-notifier.service.ts +++ b/apps/api/src/task-management/task-item-mention-notifier.service.ts @@ -110,7 +110,11 @@ export class TaskItemMentionNotifierService { } // Check if user is unsubscribed from task mention notifications - const isUnsubscribed = await isUserUnsubscribed(db, user.email, 'taskMentions'); + const isUnsubscribed = await isUserUnsubscribed( + db, + user.email, + 'taskMentions', + ); if (isUnsubscribed) { this.logger.log( `Skipping mention notification: user ${user.email} is unsubscribed from task mentions`, diff --git a/apps/api/src/task-management/task-management.service.ts b/apps/api/src/task-management/task-management.service.ts index 2ef9ac7b0..cb7b5d67a 100644 --- a/apps/api/src/task-management/task-management.service.ts +++ b/apps/api/src/task-management/task-management.service.ts @@ -87,7 +87,9 @@ export class TaskManagementService { if (error instanceof BadRequestException) { throw error; } - throw new InternalServerErrorException('Failed to fetch task items 
stats'); + throw new InternalServerErrorException( + 'Failed to fetch task items stats', + ); } } diff --git a/apps/api/src/tasks/attachments.service.ts b/apps/api/src/tasks/attachments.service.ts index b8821adb1..868ba040c 100644 --- a/apps/api/src/tasks/attachments.service.ts +++ b/apps/api/src/tasks/attachments.service.ts @@ -15,6 +15,7 @@ import { db } from '@trycompai/db'; import { randomBytes } from 'crypto'; import { AttachmentResponseDto } from './dto/task-responses.dto'; import { UploadAttachmentDto } from './dto/upload-attachment.dto'; +import { s3Client } from '@/app/s3'; @Injectable() export class AttachmentsService { @@ -27,13 +28,15 @@ export class AttachmentsService { // AWS configuration is validated at startup via ConfigModule // Safe to access environment variables directly since they're validated this.bucketName = process.env.APP_AWS_BUCKET_NAME!; - this.s3Client = new S3Client({ - region: process.env.APP_AWS_REGION || 'us-east-1', - credentials: { - accessKeyId: process.env.APP_AWS_ACCESS_KEY_ID!, - secretAccessKey: process.env.APP_AWS_SECRET_ACCESS_KEY!, - }, - }); + + if (!s3Client) { + console.error('S3 Client is not initialized. Check AWS S3 configuration.'); + throw new Error( + 'S3 Client is not initialized. 
Check AWS S3 configuration.', + ); + } + + this.s3Client = s3Client; } /** diff --git a/apps/api/src/tasks/tasks.controller.ts b/apps/api/src/tasks/tasks.controller.ts index 19dd8fb41..43b7c0daa 100644 --- a/apps/api/src/tasks/tasks.controller.ts +++ b/apps/api/src/tasks/tasks.controller.ts @@ -220,7 +220,12 @@ export class TasksController { } } - return await this.tasksService.updateTasksStatus(organizationId, taskIds, status, parsedReviewDate); + return await this.tasksService.updateTasksStatus( + organizationId, + taskIds, + status, + parsedReviewDate, + ); } // ==================== TASK ATTACHMENTS ==================== diff --git a/apps/api/src/tasks/tasks.service.ts b/apps/api/src/tasks/tasks.service.ts index 61c4e74f7..660332155 100644 --- a/apps/api/src/tasks/tasks.service.ts +++ b/apps/api/src/tasks/tasks.service.ts @@ -139,7 +139,9 @@ export class TasksService { }); if (result.count === 0) { - throw new BadRequestException('No tasks were updated. Check task IDs or organization access.'); + throw new BadRequestException( + 'No tasks were updated. 
Check task IDs or organization access.', + ); } return { updatedCount: result.count }; diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment-monthly-schedule.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment-monthly-schedule.ts index 6ccce608c..d2c84a817 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment-monthly-schedule.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment-monthly-schedule.ts @@ -89,4 +89,3 @@ export const vendorRiskAssessmentMonthlySchedule = schedules.task({ } }, }); - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment-task.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment-task.ts index 96ad4e5bd..18e0292e0 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment-task.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment-task.ts @@ -33,7 +33,9 @@ type VendorRiskAssessmentResult = { verifyTaskItemId?: string; }; -type VendorRiskAssessmentTaskInput = z.input; +type VendorRiskAssessmentTaskInput = z.input< + typeof vendorRiskAssessmentPayloadSchema +>; function parseVersionNumber(version: string | null | undefined): number { if (!version || !version.startsWith('v')) return 0; @@ -102,7 +104,10 @@ function incrementVersion(currentVersion: string | null | undefined): string { * Otherwise, check if data exists - if not, do research. 
*/ function shouldDoResearch( - globalVendor: { riskAssessmentData: unknown; riskAssessmentVersion: string | null } | null, + globalVendor: { + riskAssessmentData: unknown; + riskAssessmentVersion: string | null; + } | null, withResearch: boolean, ): boolean { // If withResearch is true, task was triggered because research is needed (we filter before triggering) @@ -135,7 +140,9 @@ function isJsonInputValue(value: unknown): value is Prisma.InputJsonValue { } if (typeof value === 'object') { - return Object.values(value as Record).every(isJsonInputValue); + return Object.values(value as Record).every( + isJsonInputValue, + ); } return false; @@ -170,7 +177,9 @@ function extractDomain(website: string | null | undefined): string | null { try { // Add protocol if missing to make URL parsing work - const urlString = /^https?:\/\//i.test(trimmed) ? trimmed : `https://${trimmed}`; + const urlString = /^https?:\/\//i.test(trimmed) + ? trimmed + : `https://${trimmed}`; const url = new URL(urlString); // Remove www. 
prefix and return just the domain return url.hostname.toLowerCase().replace(/^www\./, ''); @@ -216,7 +225,6 @@ export const vendorRiskAssessmentTask: Task< }, maxDuration: 1000 * 60 * 10, run: async (payload) => { - const vendor = await db.vendor.findFirst({ where: { id: payload.vendorId, @@ -255,7 +263,10 @@ export const vendorRiskAssessmentTask: Task< const normalizedWebsite = normalizeWebsite(vendor.website); if (!normalizedWebsite) { - logger.info('⏭️ SKIP (invalid website)', { vendor: payload.vendorName, website: vendor.website }); + logger.info('⏭️ SKIP (invalid website)', { + vendor: payload.vendorName, + website: vendor.website, + }); await db.vendor.update({ where: { id: vendor.id }, data: { status: VendorStatus.assessed }, @@ -287,19 +298,19 @@ export const vendorRiskAssessmentTask: Task< riskAssessmentUpdatedAt: true, riskAssessmentData: true, }, - orderBy: [ - { riskAssessmentUpdatedAt: 'desc' }, - { createdAt: 'desc' }, - ], + orderBy: [{ riskAssessmentUpdatedAt: 'desc' }, { createdAt: 'desc' }], }) : []; - + // Use the most recent one for reading/checking, but we'll update all duplicates const globalVendor = globalVendors[0] ?? null; // Determine if research is needed // If withResearch is true, task was triggered because research is needed (we filter before triggering) - const needsResearch = shouldDoResearch(globalVendor, payload.withResearch ?? false); + const needsResearch = shouldDoResearch( + globalVendor, + payload.withResearch ?? false, + ); if (needsResearch) { logger.info('🔍 DOING RESEARCH', { @@ -316,10 +327,11 @@ export const vendorRiskAssessmentTask: Task< // Still ensure a "Verify risk assessment" task exists so humans can confirm accuracy, // even when we are reusing cached GlobalVendors data (no research performed). - const { creatorMemberId, assigneeMemberId } = await resolveTaskCreatorAndAssignee({ - organizationId: payload.organizationId, - createdByUserId: payload.createdByUserId ?? 
null, - }); + const { creatorMemberId, assigneeMemberId } = + await resolveTaskCreatorAndAssignee({ + organizationId: payload.organizationId, + createdByUserId: payload.createdByUserId ?? null, + }); const creatorMember = await db.member.findUnique({ where: { id: creatorMemberId }, @@ -344,7 +356,8 @@ export const vendorRiskAssessmentTask: Task< await db.taskItem.create({ data: { title: VERIFY_RISK_ASSESSMENT_TASK_TITLE, - description: 'Review the latest Risk Assessment and confirm it is accurate.', + description: + 'Review the latest Risk Assessment and confirm it is accurate.', status: TaskItemStatus.todo, priority: TaskItemPriority.high, entityId: payload.vendorId, @@ -365,7 +378,8 @@ export const vendorRiskAssessmentTask: Task< }, data: { status: TaskItemStatus.todo, - description: 'Review the latest Risk Assessment and confirm it is accurate.', + description: + 'Review the latest Risk Assessment and confirm it is accurate.', assigneeId: assigneeMemberId, updatedById: creatorMemberId, }, @@ -418,10 +432,11 @@ export const vendorRiskAssessmentTask: Task< }, }); - const { creatorMemberId, assigneeMemberId } = await resolveTaskCreatorAndAssignee({ - organizationId: payload.organizationId, - createdByUserId: payload.createdByUserId ?? null, - }); + const { creatorMemberId, assigneeMemberId } = + await resolveTaskCreatorAndAssignee({ + organizationId: payload.organizationId, + createdByUserId: payload.createdByUserId ?? 
null, + }); // Get creator member with userId for activity log const creatorMember = await db.member.findUnique({ @@ -430,10 +445,13 @@ export const vendorRiskAssessmentTask: Task< }); if (!creatorMember?.userId) { - logger.warn('Creator member has no userId, skipping activity log creation', { - creatorMemberId, - organizationId: payload.organizationId, - }); + logger.warn( + 'Creator member has no userId, skipping activity log creation', + { + creatorMemberId, + organizationId: payload.organizationId, + }, + ); } // Ensure a "Verify risk assessment" task exists immediately, but keep it blocked while generation runs. @@ -474,9 +492,9 @@ export const vendorRiskAssessmentTask: Task< try { await db.auditLog.create({ data: { - organizationId: payload.organizationId, + organizationId: payload.organizationId, userId: creatorMember.userId, - memberId: creatorMemberId, + memberId: creatorMemberId, entityType: 'task', entityId: verifyTaskItemId, description: 'created this task', @@ -488,7 +506,7 @@ export const vendorRiskAssessmentTask: Task< parentEntityId: payload.vendorId, }, }, - }); + }); } catch (error) { logger.error('Failed to log task item creation:', error); // Don't throw - audit log failures should not block operations @@ -500,7 +518,8 @@ export const vendorRiskAssessmentTask: Task< const frameworkChecklist = buildFrameworkChecklist(organizationFrameworks); // Do research if needed (vendor doesn't exist, no data, or explicitly requested) - const research = needsResearch && payload.vendorWebsite + const research = + needsResearch && payload.vendorWebsite ? 
await firecrawlAgentVendorRiskAssessment({ vendorName: payload.vendorName, vendorWebsite: payload.vendorWebsite, @@ -532,7 +551,10 @@ export const vendorRiskAssessmentTask: Task< riskAssessmentVersion: true, riskAssessmentUpdatedAt: true, }, - orderBy: [{ riskAssessmentUpdatedAt: 'desc' }, { createdAt: 'desc' }], + orderBy: [ + { riskAssessmentUpdatedAt: 'desc' }, + { createdAt: 'desc' }, + ], }) : []; @@ -575,7 +597,10 @@ export const vendorRiskAssessmentTask: Task< }, }); - return { nextVersion: computedNext, updatedWebsites: [normalizedWebsite] }; + return { + nextVersion: computedNext, + updatedWebsites: [normalizedWebsite], + }; }, }); @@ -603,7 +628,8 @@ export const vendorRiskAssessmentTask: Task< }, data: { status: TaskItemStatus.todo, - description: 'Review the latest Risk Assessment and confirm it is accurate.', + description: + 'Review the latest Risk Assessment and confirm it is accurate.', // Keep stable assignee/creator assigneeId: assigneeMemberId, updatedById: creatorMemberId, @@ -626,5 +652,3 @@ export const vendorRiskAssessmentTask: Task< }; }, }); - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-schema.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-schema.ts index 0532376c3..f9f1f9541 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-schema.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-schema.ts @@ -1,8 +1,14 @@ import { z } from 'zod'; -const urlOrEmptySchema = z.union([z.string().url(), z.literal('')]).optional().nullable(); +const urlOrEmptySchema = z + .union([z.string().url(), z.literal('')]) + .optional() + .nullable(); // Firecrawl may return various date formats (ISO, "YYYY-MM-DD", etc). We normalize later. 
-const dateStringOrEmptySchema = z.union([z.string(), z.literal('')]).optional().nullable(); +const dateStringOrEmptySchema = z + .union([z.string(), z.literal('')]) + .optional() + .nullable(); export const vendorRiskAssessmentAgentSchema = z.object({ risk_level: z.string().optional().nullable(), @@ -12,7 +18,10 @@ export const vendorRiskAssessmentAgentSchema = z.object({ .array( z.object({ type: z.string(), - status: z.enum(['verified', 'expired', 'not_certified', 'unknown']).optional().nullable(), + status: z + .enum(['verified', 'expired', 'not_certified', 'unknown']) + .optional() + .nullable(), issued_at: dateStringOrEmptySchema, expires_at: dateStringOrEmptySchema, url: urlOrEmptySchema, @@ -38,7 +47,10 @@ export const vendorRiskAssessmentAgentSchema = z.object({ summary: z.string().optional().nullable(), source: z.string().optional().nullable(), url: urlOrEmptySchema, - sentiment: z.enum(['positive', 'negative', 'neutral']).optional().nullable(), + sentiment: z + .enum(['positive', 'negative', 'neutral']) + .optional() + .nullable(), }), ) .optional() @@ -48,5 +60,3 @@ export const vendorRiskAssessmentAgentSchema = z.object({ export type VendorRiskAssessmentAgentResult = z.infer< typeof vendorRiskAssessmentAgentSchema >; - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-types.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-types.ts index 2fdbb2dba..b934550b9 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-types.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/agent-types.ts @@ -17,7 +17,10 @@ export type VendorRiskAssessmentLink = { url: string; }; -export type VendorRiskAssessmentNewsSentiment = 'positive' | 'negative' | 'neutral'; +export type VendorRiskAssessmentNewsSentiment = + | 'positive' + | 'negative' + | 'neutral'; export type VendorRiskAssessmentNewsItem = { date: string; @@ -39,5 +42,3 @@ export type VendorRiskAssessmentDataV1 = { links?: VendorRiskAssessmentLink[] | null; 
news?: VendorRiskAssessmentNewsItem[] | null; }; - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/assignee.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/assignee.ts index c14db58b1..fee1bced2 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/assignee.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/assignee.ts @@ -35,7 +35,9 @@ export async function resolveTaskCreatorAndAssignee(params: { const creatorMemberId = creatorMember?.id ?? adminMember?.id ?? anyMember?.id; if (!creatorMemberId) { - throw new Error(`No active members found for organization ${organizationId}`); + throw new Error( + `No active members found for organization ${organizationId}`, + ); } return { @@ -43,5 +45,3 @@ export async function resolveTaskCreatorAndAssignee(params: { assigneeMemberId: creatorMember?.id ?? adminMember?.id ?? null, }; } - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/constants.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/constants.ts index 64e35e9ff..098b714c9 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/constants.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/constants.ts @@ -1,5 +1,4 @@ -export const VENDOR_RISK_ASSESSMENT_TASK_ID = 'vendor-risk-assessment-task' as const; +export const VENDOR_RISK_ASSESSMENT_TASK_ID = + 'vendor-risk-assessment-task' as const; export const VENDOR_RISK_ASSESSMENT_TASK_TITLE = 'Risk Assessment' as const; - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/description.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/description.ts index 32c135c0e..017e4d10f 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/description.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/description.ts @@ -32,8 +32,7 @@ export function buildRiskAssessmentDescription(params: { ...base, vendorName: base.vendorName ?? vendorName, vendorWebsite: base.vendorWebsite ?? 
vendorWebsite, - securityAssessment: (base.securityAssessment ?? '') + checklistSuffix || null, + securityAssessment: + (base.securityAssessment ?? '') + checklistSuffix || null, } satisfies VendorRiskAssessmentDataV1); } - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl-agent.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl-agent.ts index 3c5ad6108..7b1cc6e2d 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl-agent.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl-agent.ts @@ -10,7 +10,8 @@ function normalizeUrl(url: string | null | undefined): string | null { if (trimmed === '') return null; const looksLikeDomain = - !/^https?:\/\//i.test(trimmed) && /^[a-z0-9.-]+\.[a-z]{2,}([/].*)?$/i.test(trimmed); + !/^https?:\/\//i.test(trimmed) && + /^[a-z0-9.-]+\.[a-z]{2,}([/].*)?$/i.test(trimmed); const candidate = looksLikeDomain ? `https://${trimmed}` : trimmed; try { @@ -37,7 +38,9 @@ export async function firecrawlAgentVendorRiskAssessment(params: { }): Promise { const apiKey = process.env.FIRECRAWL_API_KEY; if (!apiKey) { - logger.warn('FIRECRAWL_API_KEY is not configured; skipping vendor research'); + logger.warn( + 'FIRECRAWL_API_KEY is not configured; skipping vendor research', + ); return null; } @@ -47,7 +50,9 @@ export async function firecrawlAgentVendorRiskAssessment(params: { try { origin = new URL(vendorWebsite).origin; } catch { - logger.warn('Invalid website URL provided to Firecrawl Agent', { vendorWebsite }); + logger.warn('Invalid website URL provided to Firecrawl Agent', { + vendorWebsite, + }); return null; } @@ -126,7 +131,10 @@ Focus on their official website (especially trust/security/compliance pages), pr summary: { type: 'string' }, source: { type: 'string' }, url: { type: 'string' }, - sentiment: { type: 'string', enum: ['positive', 'negative', 'neutral'] }, + sentiment: { + type: 'string', + enum: ['positive', 'negative', 'neutral'], + }, }, required: 
['date', 'title'], }, @@ -147,11 +155,22 @@ Focus on their official website (especially trust/security/compliance pages), pr const links = parsed.data.links ?? null; const linkPairs: Array<{ label: string; url: string }> = []; - if (links?.trust_center_url) linkPairs.push({ label: 'Trust & Security', url: links.trust_center_url }); - if (links?.security_page_url) linkPairs.push({ label: 'Security Overview', url: links.security_page_url }); - if (links?.soc2_report_url) linkPairs.push({ label: 'SOC 2 Report', url: links.soc2_report_url }); - if (links?.privacy_policy_url) linkPairs.push({ label: 'Privacy Policy', url: links.privacy_policy_url }); - if (links?.terms_of_service_url) linkPairs.push({ label: 'Terms of Service', url: links.terms_of_service_url }); + if (links?.trust_center_url) + linkPairs.push({ label: 'Trust & Security', url: links.trust_center_url }); + if (links?.security_page_url) + linkPairs.push({ + label: 'Security Overview', + url: links.security_page_url, + }); + if (links?.soc2_report_url) + linkPairs.push({ label: 'SOC 2 Report', url: links.soc2_report_url }); + if (links?.privacy_policy_url) + linkPairs.push({ label: 'Privacy Policy', url: links.privacy_policy_url }); + if (links?.terms_of_service_url) + linkPairs.push({ + label: 'Terms of Service', + url: links.terms_of_service_url, + }); const normalizedLinks = linkPairs .map((l) => ({ ...l, url: normalizeUrl(l.url) })) @@ -199,7 +218,9 @@ Focus on their official website (especially trust/security/compliance pages), pr kind: 'vendorRiskAssessmentV1', vendorName, vendorWebsite, - lastResearchedAt: normalizeIso(parsed.data.last_researched_at ?? null) ?? new Date().toISOString(), + lastResearchedAt: + normalizeIso(parsed.data.last_researched_at ?? null) ?? + new Date().toISOString(), riskLevel: parsed.data.risk_level ?? null, securityAssessment: parsed.data.security_assessment ?? null, certifications: certifications.length > 0 ? 
certifications : null, @@ -218,5 +239,3 @@ Focus on their official website (especially trust/security/compliance pages), pr return result; } - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl.ts index 6af98c902..4ce68c7c5 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl.ts +++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/firecrawl.ts @@ -21,7 +21,10 @@ function normalizeUrl(url: string | null | undefined): string | null { if (!trimmed || trimmed === '') return null; // If it looks like a domain but missing scheme, assume https - if (!/^https?:\/\//i.test(trimmed) && /^[a-z0-9.-]+\.[a-z]{2,}([/].*)?$/i.test(trimmed)) { + if ( + !/^https?:\/\//i.test(trimmed) && + /^[a-z0-9.-]+\.[a-z]{2,}([/].*)?$/i.test(trimmed) + ) { trimmed = `https://${trimmed}`; } @@ -47,7 +50,9 @@ export async function firecrawlExtractVendorData( ): Promise { const apiKey = process.env.FIRECRAWL_API_KEY; if (!apiKey) { - logger.warn('FIRECRAWL_API_KEY is not configured; skipping vendor research'); + logger.warn( + 'FIRECRAWL_API_KEY is not configured; skipping vendor research', + ); return null; } @@ -66,9 +71,9 @@ export async function firecrawlExtractVendorData( 'Content-Type': 'application/json', Authorization: `Bearer ${apiKey}`, }, - body: JSON.stringify({ - urls: [`${origin}/*`], - prompt: `You are a security analyst collecting SOC 2 + ISO 27001 evidence links for a third-party risk assessment. + body: JSON.stringify({ + urls: [`${origin}/*`], + prompt: `You are a security analyst collecting SOC 2 + ISO 27001 evidence links for a third-party risk assessment. Goal: return the MOST SPECIFIC, DIRECT URL for each document type below. Do not return general category pages. 
@@ -102,7 +107,8 @@ When multiple candidates exist, choose the most direct URL that best matches the properties: { company_description: { type: 'string', - description: 'Brief 1-2 sentence description of what the company does and their main services/products', + description: + 'Brief 1-2 sentence description of what the company does and their main services/products', }, privacy_policy_url: { type: 'string', @@ -137,19 +143,22 @@ When multiple candidates exist, choose the most direct URL that best matches the }, }, }, - enableWebSearch: true, - includeSubdomains: true, - showSources: true, - scrapeOptions: { - onlyMainContent: false, - removeBase64Images: true, - }, - }), - }); + enableWebSearch: true, + includeSubdomains: true, + showSources: true, + scrapeOptions: { + onlyMainContent: false, + removeBase64Images: true, + }, + }), + }); const initialData = (await initialResponse.json()) as FirecrawlStartResponse; if (!initialData.success || !initialData.id) { - logger.warn('Firecrawl failed to start extraction', { website, initialData }); + logger.warn('Firecrawl failed to start extraction', { + website, + initialData, + }); return null; } @@ -209,7 +218,11 @@ When multiple candidates exist, choose the most direct URL that best matches the } if (statusData.status === 'failed' || statusData.status === 'cancelled') { - logger.warn('Firecrawl extraction did not complete', { website, jobId, statusData }); + logger.warn('Firecrawl extraction did not complete', { + website, + jobId, + statusData, + }); return null; } } @@ -217,5 +230,3 @@ When multiple candidates exist, choose the most direct URL that best matches the logger.warn('Firecrawl extraction timed out', { website, jobId }); return null; } - - diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/frameworks.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/frameworks.ts index 8aa620a1b..2fb1371bc 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/frameworks.ts +++ 
b/apps/api/src/trigger/vendor/vendor-risk-assessment/frameworks.ts @@ -17,23 +17,33 @@ type FrameworkRule = { const FRAMEWORK_RULES: FrameworkRule[] = [ { match: /\bsoc\s*2\b/i, - checks: ['Review their SOC 2 report (Type I / Type II) and note any exceptions.'], + checks: [ + 'Review their SOC 2 report (Type I / Type II) and note any exceptions.', + ], }, { match: /\biso\s*27001\b/i, - checks: ['Review their ISO 27001 certificate and scope/SoA (if available).'], + checks: [ + 'Review their ISO 27001 certificate and scope/SoA (if available).', + ], }, { match: /\bgdpr\b/i, - checks: ['Check for a DPA (Data Processing Agreement) and confirm GDPR commitments.'], + checks: [ + 'Check for a DPA (Data Processing Agreement) and confirm GDPR commitments.', + ], }, { match: /\bhipaa\b/i, - checks: ['If PHI is involved, confirm whether they offer a BAA and required safeguards.'], + checks: [ + 'If PHI is involved, confirm whether they offer a BAA and required safeguards.', + ], }, { match: /\bpci\b|\bpci\s*dss\b/i, - checks: ['If payment data is involved, confirm PCI DSS compliance / attestation.'], + checks: [ + 'If payment data is involved, confirm PCI DSS compliance / attestation.', + ], }, ]; @@ -75,7 +85,9 @@ export function buildFrameworkChecklist(frameworks: OrgFramework[]): string[] { .filter(Boolean) .join(', '); if (frameworkList) { - return [`Review vendor documentation relevant to your frameworks: ${frameworkList}.`]; + return [ + `Review vendor documentation relevant to your frameworks: ${frameworkList}.`, + ]; } } @@ -84,4 +96,4 @@ export function buildFrameworkChecklist(frameworks: OrgFramework[]): string[] { export function getDefaultFrameworks(): OrgFramework[] { return DEFAULT_FRAMEWORKS; -} \ No newline at end of file +} diff --git a/apps/api/src/trigger/vendor/vendor-risk-assessment/schema.ts b/apps/api/src/trigger/vendor/vendor-risk-assessment/schema.ts index 35628ffb4..0f2363c5c 100644 --- a/apps/api/src/trigger/vendor/vendor-risk-assessment/schema.ts 
+++ b/apps/api/src/trigger/vendor/vendor-risk-assessment/schema.ts @@ -2,11 +2,26 @@ import { z } from 'zod'; export const firecrawlVendorDataSchema = z.object({ company_description: z.string().optional().nullable(), - privacy_policy_url: z.union([z.string().url(), z.literal('')]).optional().nullable(), - terms_of_service_url: z.union([z.string().url(), z.literal('')]).optional().nullable(), - security_overview_url: z.union([z.string().url(), z.literal('')]).optional().nullable(), - trust_portal_url: z.union([z.string().url(), z.literal('')]).optional().nullable(), - soc2_report_url: z.union([z.string().url(), z.literal('')]).optional().nullable(), + privacy_policy_url: z + .union([z.string().url(), z.literal('')]) + .optional() + .nullable(), + terms_of_service_url: z + .union([z.string().url(), z.literal('')]) + .optional() + .nullable(), + security_overview_url: z + .union([z.string().url(), z.literal('')]) + .optional() + .nullable(), + trust_portal_url: z + .union([z.string().url(), z.literal('')]) + .optional() + .nullable(), + soc2_report_url: z + .union([z.string().url(), z.literal('')]) + .optional() + .nullable(), certified_security_frameworks: z.array(z.string()).optional().nullable(), }); @@ -28,5 +43,3 @@ export const vendorRiskAssessmentPayloadSchema = z.object({ export type VendorRiskAssessmentPayload = z.infer< typeof vendorRiskAssessmentPayloadSchema >; - - diff --git a/apps/api/src/trust-portal/dto/trust-document.dto.ts b/apps/api/src/trust-portal/dto/trust-document.dto.ts index a040f11ee..6f7825594 100644 --- a/apps/api/src/trust-portal/dto/trust-document.dto.ts +++ b/apps/api/src/trust-portal/dto/trust-document.dto.ts @@ -89,5 +89,3 @@ export class DeleteTrustDocumentDto { @IsString() organizationId!: string; } - - diff --git a/apps/api/src/trust-portal/nda-pdf.service.ts b/apps/api/src/trust-portal/nda-pdf.service.ts index 2bff49a4c..559602eb1 100644 --- a/apps/api/src/trust-portal/nda-pdf.service.ts +++ 
b/apps/api/src/trust-portal/nda-pdf.service.ts @@ -145,34 +145,70 @@ By signing below, the Receiving Party agrees to be bound by the terms of this Ag agreementId: string, customWatermarkText?: string, ) { - const font = await pdfDoc.embedFont(StandardFonts.HelveticaBold); + const fontBold = await pdfDoc.embedFont(StandardFonts.HelveticaBold); + const fontRegular = await pdfDoc.embedFont(StandardFonts.Helvetica); const pages = pdfDoc.getPages(); - const timestamp = new Date().toISOString(); - const watermarkText = - customWatermarkText || - `For: ${name} <${email}> | ${timestamp} | ID: ${agreementId}`; + const watermarkText = 'CompAI'; + const requestedByText = `Requested by: ${email}`; + const fontSize = 48; + const subTextSize = 12; for (const page of pages) { const { width, height } = page.getSize(); - const textWidth = font.widthOfTextAtSize(watermarkText, 10); - - page.drawText(watermarkText, { - x: width / 2 - textWidth / 2, - y: height / 2, - size: 10, - font, - color: rgb(0.8, 0.8, 0.8), - opacity: 0.3, - rotate: degrees(-45), - }); + const pageNumber = pages.indexOf(page) + 1; + + // Create a repeating diagonal watermark pattern with alternating angles + const horizontalSpacing = 250; // Space between watermarks horizontally + const verticalSpacing = 180; // Space between watermarks vertically + + // Calculate how many watermarks we need to cover the page + const numRows = Math.ceil(height / verticalSpacing) + 2; + const numCols = Math.ceil(width / horizontalSpacing) + 2; + + for (let row = -1; row < numRows; row++) { + for (let col = -1; col < numCols; col++) { + // Create a checkerboard-like offset pattern + const offsetX = (row % 2) * (horizontalSpacing / 2); + const x = col * horizontalSpacing + offsetX; + const y = row * verticalSpacing; + + // Alternate between -45 and -35 degrees for visual interest + const angle = (row + col) % 2 === 0 ? 
-45 : -35; + + // Main "CompAI" watermark + page.drawText(watermarkText, { + x, + y, + size: fontSize, + font: fontBold, + color: rgb(0.85, 0.85, 0.85), // Darker gray + opacity: 0.1, // Increased opacity for better visibility + rotate: degrees(angle), + }); + + // "Requested by: [email]" text below CompAI + page.drawText(requestedByText, { + x: x - 10, + y: y - 18, + size: subTextSize, + font: fontRegular, + color: rgb(0.85, 0.85, 0.85), // Darker gray + opacity: 0.1, // Slightly darker and more visible + rotate: degrees(angle), + }); + } + } + + // Add small footer with page number and document ID + const footerText = `Page ${pageNumber} of ${pages.length} • Document ID: ${agreementId.split('-').pop()?.slice(0, 8)}`; - page.drawText(`Document ID: ${agreementId}`, { - x: 50, - y: 20, + page.drawText(footerText, { + x: width / 2 - fontRegular.widthOfTextAtSize(footerText, 8) / 2, + y: 15, size: 8, - font, - color: rgb(0.5, 0.5, 0.5), + font: fontRegular, + color: rgb(0.6, 0.6, 0.6), }); } } diff --git a/apps/api/src/trust-portal/policy-pdf-renderer.service.ts b/apps/api/src/trust-portal/policy-pdf-renderer.service.ts index c40c8d567..9a8e3d9ac 100644 --- a/apps/api/src/trust-portal/policy-pdf-renderer.service.ts +++ b/apps/api/src/trust-portal/policy-pdf-renderer.service.ts @@ -27,6 +27,50 @@ interface PolicyForPDF { @Injectable() export class PolicyPdfRendererService { + /** + * Convert hex color to RGB values (0-255 range for jsPDF) + */ + private hexToRgb(hex: string): { r: number; g: number; b: number } { + const cleanHex = hex.replace('#', ''); + const r = parseInt(cleanHex.substring(0, 2), 16); + const g = parseInt(cleanHex.substring(2, 4), 16); + const b = parseInt(cleanHex.substring(4, 6), 16); + return { r, g, b }; + } + + /** + * Get accent color from organization or use default + */ + private getAccentColor(primaryColor: string | null | undefined): { + r: number; + g: number; + b: number; + } { + // Default project primary color: dark teal/green 
(hsl(165, 100%, 15%) = #004D3D) + const defaultColor = { r: 0, g: 77, b: 61 }; + + if (!primaryColor) { + return defaultColor; + } + + const color = this.hexToRgb(primaryColor); + + // Check for NaN values (parseInt returns NaN for invalid hex) + if ( + Number.isNaN(color.r) || + Number.isNaN(color.g) || + Number.isNaN(color.b) + ) { + console.warn( + 'Invalid primary color format, using default:', + primaryColor, + ); + return defaultColor; + } + + return color; + } + private cleanTextForPDF(text: string): string { // Strip invisible/control-ish unicode chars that commonly appear via copy/paste. // These aren't visible in the editor, but previous logic converted unknown unicode to @@ -363,6 +407,8 @@ export class PolicyPdfRendererService { renderPoliciesPdfBuffer( policies: PolicyForPDF[], organizationName?: string, + primaryColor?: string | null, + totalPoliciesCount?: number, ): Buffer { const doc = new jsPDF(); const config: PDFConfig = { @@ -376,15 +422,65 @@ export class PolicyPdfRendererService { yPosition: 20, }; - const documentTitle = organizationName - ? 
`${organizationName} - All Policies` - : 'All Policies'; - const cleanTitle = this.cleanTextForPDF(documentTitle); + // Get organization primary color or use default + const accentColor = this.getAccentColor(primaryColor); + + // Add organization header if provided + if (organizationName) { + const cleanOrgName = this.cleanTextForPDF(organizationName); + + // Draw colored accent line at the top + config.doc.setLineWidth(3); + config.doc.setDrawColor(accentColor.r, accentColor.g, accentColor.b); + config.doc.line( + config.margin, + config.yPosition, + config.pageWidth - config.margin, + config.yPosition, + ); + + config.yPosition += config.lineHeight * 2.5; - config.doc.setFontSize(18); - config.doc.setFont('helvetica', 'bold'); - config.doc.text(cleanTitle, config.margin, config.yPosition); - config.yPosition += config.lineHeight * 3; + // Organization name - large and bold + config.doc.setFontSize(24); + config.doc.setFont('helvetica', 'bold'); + config.doc.setTextColor(0, 0, 0); + config.doc.text(cleanOrgName, config.margin, config.yPosition); + + config.yPosition += config.lineHeight * 2; + + // "All Policies" subtitle - simple and clean + config.doc.setFontSize(14); + config.doc.setFont('helvetica', 'normal'); + config.doc.setTextColor(100, 100, 100); // Light gray + config.doc.text('All Policies', config.margin, config.yPosition); + + config.yPosition += config.lineHeight * 2; + + // Metadata - minimal styling + config.doc.setFontSize(9); + config.doc.setFont('helvetica', 'normal'); + config.doc.setTextColor(140, 140, 140); // Lighter gray + + const generatedDate = new Date().toLocaleDateString('en-US', { + year: 'numeric', + month: 'long', + day: 'numeric', + }); + + config.doc.text(`${generatedDate}`, config.margin, config.yPosition); + + config.yPosition += config.lineHeight * 1.2; + + config.doc.text( + `Total Policies: ${totalPoliciesCount ?? 
policies.length}`, + config.margin, + config.yPosition, + ); + + // Extra spacing before policies + config.yPosition += config.lineHeight * 3; + } policies.forEach((policy, index) => { config.doc.setTextColor(0, 0, 0); @@ -394,13 +490,20 @@ export class PolicyPdfRendererService { config.yPosition = config.margin; } + // Add visual policy separator with icon and styled header if (policy.name) { - const cleanPolicyTitle = this.cleanTextForPDF(policy.name); - config.doc.setFontSize(16); + // Draw accent bar with organization's primary color + config.doc.setFillColor(accentColor.r, accentColor.g, accentColor.b); + config.doc.rect(config.margin, config.yPosition, 4, 12, 'F'); + + // Add "POLICY:" label and title + config.doc.setFontSize(14); config.doc.setFont('helvetica', 'bold'); - config.doc.setTextColor(0, 0, 0); - config.doc.text(cleanPolicyTitle, config.margin, config.yPosition); - config.yPosition += config.lineHeight * 2; + config.doc.setTextColor(30, 41, 59); // Dark slate color + const policyTitle = this.cleanTextForPDF(`POLICY: ${policy.name}`); + config.doc.text(policyTitle, config.margin + 10, config.yPosition + 8); + + config.yPosition += config.lineHeight * 4; } if (policy.content) { @@ -430,7 +533,7 @@ export class PolicyPdfRendererService { doc.setFontSize(8); doc.setTextColor(128, 128, 128); doc.text( - `Page ${i} of ${totalPages}`, + `Policy page ${i} of ${totalPages}`, config.pageWidth / 2, config.pageHeight - 10, { align: 'center' }, diff --git a/apps/api/src/trust-portal/trust-access.controller.ts b/apps/api/src/trust-portal/trust-access.controller.ts index 8784ae7c8..41d25e711 100644 --- a/apps/api/src/trust-portal/trust-access.controller.ts +++ b/apps/api/src/trust-portal/trust-access.controller.ts @@ -467,6 +467,21 @@ export class TrustAccessController { return this.trustAccessService.downloadAllPoliciesByAccessToken(token); } + @Get('access/:token/policies/download-all-zip') + @HttpCode(HttpStatus.OK) + @ApiOperation({ + summary: 'Download all 
policies as ZIP with individual PDFs', + description: + 'Generate ZIP archive containing individual watermarked PDFs for each policy', + }) + @ApiResponse({ + status: HttpStatus.OK, + description: 'Download URL for ZIP archive returned', + }) + async downloadAllPoliciesAsZip(@Param('token') token: string) { + return this.trustAccessService.downloadAllPoliciesAsZipByAccessToken(token); + } + @Get('access/:token/compliance-resources') @HttpCode(HttpStatus.OK) @ApiOperation({ @@ -509,7 +524,9 @@ export class TrustAccessController { description: 'Signed URL for ZIP archive returned', }) async downloadAllTrustDocuments(@Param('token') token: string) { - return this.trustAccessService.downloadAllTrustDocumentsByAccessToken(token); + return this.trustAccessService.downloadAllTrustDocumentsByAccessToken( + token, + ); } @Get('access/:token/documents/:documentId') diff --git a/apps/api/src/trust-portal/trust-access.service.ts b/apps/api/src/trust-portal/trust-access.service.ts index d4209fd33..95b853fc6 100644 --- a/apps/api/src/trust-portal/trust-access.service.ts +++ b/apps/api/src/trust-portal/trust-access.service.ts @@ -23,9 +23,60 @@ import { APP_AWS_ORG_ASSETS_BUCKET, s3Client } from '../app/s3'; import { Prisma, TrustFramework } from '@prisma/client'; import archiver from 'archiver'; import { PassThrough, Readable } from 'stream'; +import { PDFDocument, rgb, StandardFonts } from 'pdf-lib'; @Injectable() export class TrustAccessService { + /** + * Convert hex color to RGB values (0-1 range for pdf-lib) + * @param hex - Hex color string (e.g., "#3B82F6" or "3B82F6") + * @returns RGB object with r, g, b values between 0 and 1 + */ + private hexToRgb(hex: string): { r: number; g: number; b: number } { + // Remove # if present + const cleanHex = hex.replace('#', ''); + + // Parse hex values + const r = parseInt(cleanHex.substring(0, 2), 16) / 255; + const g = parseInt(cleanHex.substring(2, 4), 16) / 255; + const b = parseInt(cleanHex.substring(4, 6), 16) / 255; + + return 
{ r, g, b }; + } + + /** + * Get accent color from organization or use default + */ + private getAccentColor(primaryColor: string | null | undefined): { + r: number; + g: number; + b: number; + } { + // Default project primary color: dark teal/green (hsl(165, 100%, 15%) = #004D3D) + const defaultColor = { r: 0, g: 0.302, b: 0.239 }; + + if (!primaryColor) { + return defaultColor; + } + + const color = this.hexToRgb(primaryColor); + + // Check for NaN values (parseInt returns NaN for invalid hex) + if ( + Number.isNaN(color.r) || + Number.isNaN(color.g) || + Number.isNaN(color.b) + ) { + console.warn( + 'Invalid primary color format, using default:', + primaryColor, + ); + return defaultColor; + } + + return color; + } + private readonly TRUST_APP_URL = process.env.TRUST_APP_URL || process.env.BASE_URL || @@ -735,15 +786,22 @@ export class TrustAccessService { // Check if grant has expired if (grant.expiresAt < now) { - throw new BadRequestException('Cannot resend access email for expired grant'); + throw new BadRequestException( + 'Cannot resend access email for expired grant', + ); } // Generate a new access token if expired or missing let accessToken = grant.accessToken; - if (!accessToken || (grant.accessTokenExpiresAt && grant.accessTokenExpiresAt < now)) { + if ( + !accessToken || + (grant.accessTokenExpiresAt && grant.accessTokenExpiresAt < now) + ) { accessToken = this.generateToken(32); - const accessTokenExpiresAt = new Date(now.getTime() + 24 * 60 * 60 * 1000); + const accessTokenExpiresAt = new Date( + now.getTime() + 24 * 60 * 60 * 1000, + ); await db.trustAccessGrant.update({ where: { id: grantId }, @@ -1492,9 +1550,7 @@ export class TrustAccessService { const archive = archiver('zip', { zlib: { level: 9 } }); const zipStream = new PassThrough(); - let putPromise: - | Promise - | undefined; + let putPromise: Promise | undefined; try { putPromise = s3Client.send( @@ -1758,6 +1814,7 @@ export class TrustAccessService { id: true, name: true, content: 
true, + pdfUrl: true, + }, + orderBy: [{ lastPublishedAt: 'desc' }, { updatedAt: 'desc' }], }); @@ -1766,14 +1823,215 @@ export class TrustAccessService { throw new NotFoundException('No published policies available'); } - const pdfBuffer = this.pdfRendererService.renderPoliciesPdfBuffer( - policies.map((p) => ({ - name: p.name, - content: p.content, - })), - grant.accessRequest.organization.name, + // Create merged PDF document + const mergedPdf = await PDFDocument.create(); + + const organizationName = + grant.accessRequest.organization.name || 'Organization'; + + // Get organization primary color or use default + const accentColor = this.getAccentColor( + grant.accessRequest.organization.primaryColor, ); + // Embed fonts once before the loop (expensive operation) + const helveticaBold = await mergedPdf.embedFont( + StandardFonts.HelveticaBold, + ); + const helvetica = await mergedPdf.embedFont(StandardFonts.Helvetica); + + // Step 1: Fetch/render all PDFs in parallel (expensive I/O operations) + type PreparedPolicy = { + policy: (typeof policies)[0]; + pdfBuffer: Buffer; + isUploaded: boolean; + }; + + const preparePolicy = async ( + policy: (typeof policies)[0], + ): Promise<PreparedPolicy> => { + const hasUploadedPdf = policy.pdfUrl && policy.pdfUrl.trim() !== ''; + + if (hasUploadedPdf) { + try { + const pdfBuffer = await this.attachmentsService.getObjectBuffer( + policy.pdfUrl!, + ); + return { + policy, + pdfBuffer: Buffer.from(pdfBuffer), + isUploaded: true, + }; + } catch (error) { + console.warn( + `Failed to fetch uploaded PDF for policy ${policy.id}, falling back to content rendering:`, + error, + ); + } + } + + // Render from content (either no pdfUrl or fetch failed) + const renderedBuffer = this.pdfRendererService.renderPoliciesPdfBuffer( + [{ name: policy.name, content: policy.content }], + undefined, // We'll add org header during merge + grant.accessRequest.organization.primaryColor, + policies.length, + ); + return { policy, pdfBuffer: renderedBuffer, isUploaded: 
false }; + }; + + const preparedPolicies = await Promise.all(policies.map(preparePolicy)); + + // Step 2: Merge PDFs sequentially (must be sequential for PDFDocument operations) + // Helper to add content-rendered policy to merged PDF + const addContentRenderedPolicy = async ( + policy: (typeof policies)[0], + addOrgHeader: boolean, + ) => { + const renderedBuffer = this.pdfRendererService.renderPoliciesPdfBuffer( + [{ name: policy.name, content: policy.content }], + addOrgHeader ? organizationName : undefined, + grant.accessRequest.organization.primaryColor, + policies.length, + ); + const renderedPdf = await PDFDocument.load(renderedBuffer); + const copiedPages = await mergedPdf.copyPages( + renderedPdf, + renderedPdf.getPageIndices(), + ); + for (const page of copiedPages) { + mergedPdf.addPage(page); + } + }; + + let isFirst = true; + for (const { policy, pdfBuffer, isUploaded } of preparedPolicies) { + if (isUploaded) { + try { + const uploadedPdf = await PDFDocument.load(pdfBuffer, { + ignoreEncryption: true, + }); + + // Rebuild the FIRST page: embed original page into a taller page + const originalFirstPage = uploadedPdf.getPage(0); + const { width, height } = originalFirstPage.getSize(); + + const headerHeight = isFirst ? 
120 : 60; + const embeddedFirstPage = await mergedPdf.embedPage(originalFirstPage); + const rebuiltFirstPage = mergedPdf.addPage([ + width, + height + headerHeight, + ]); + + rebuiltFirstPage.drawPage(embeddedFirstPage, { + x: 0, + y: 0, + width, + height, + }); + + let yPos = height + headerHeight - 25; + + if (isFirst) { + rebuiltFirstPage.drawLine({ + start: { x: 20, y: yPos + 8 }, + end: { x: width - 20, y: yPos + 8 }, + thickness: 2, + color: rgb(accentColor.r, accentColor.g, accentColor.b), + }); + + rebuiltFirstPage.drawText(`${organizationName} - All Policies`, { + x: 20, + y: yPos - 14, + size: 14, + font: helveticaBold, + color: rgb(0, 0, 0), + }); + + const generatedDate = new Date().toLocaleDateString('en-US', { + year: 'numeric', + month: 'short', + day: 'numeric', + }); + + rebuiltFirstPage.drawText( + `Generated: ${generatedDate} | Total: ${policies.length} policies`, + { + x: width - 180, + y: yPos - 14, + size: 8, + font: helvetica, + color: rgb(0.5, 0.5, 0.5), + }, + ); + + yPos -= 34; + isFirst = false; + } + + rebuiltFirstPage.drawRectangle({ + x: 55, + y: yPos - 40, + width: 10, + height: 26, + color: rgb(accentColor.r, accentColor.g, accentColor.b), + }); + + rebuiltFirstPage.drawText(`POLICY: ${policy.name}`, { + x: 75, + y: yPos - 34, + size: 16, + font: helveticaBold, + color: rgb(0.12, 0.16, 0.23), + }); + + // Remaining pages unchanged (page 2..n) + if (uploadedPdf.getPageCount() > 1) { + const copiedRemainingPages = await mergedPdf.copyPages( + uploadedPdf, + uploadedPdf.getPageIndices().slice(1), + ); + for (const page of copiedRemainingPages) { + mergedPdf.addPage(page); + } + } + } catch (error) { + // PDF is corrupted/malformed, fall back to content rendering + console.warn( + `Failed to parse uploaded PDF for policy ${policy.id}, falling back to content rendering:`, + error, + ); + await addContentRenderedPolicy(policy, isFirst); + isFirst = false; + } + } else { + // Content was already rendered, but re-render if first (needs org 
header) + await addContentRenderedPolicy(policy, isFirst); + isFirst = false; + } + } + + // Add page numbers to all pages in the merged PDF + const pages = mergedPdf.getPages(); + const totalPages = pages.length; + // helvetica font already embedded above + + for (let i = 0; i < totalPages; i++) { + const page = pages[i]; + const { width } = page.getSize(); + const pageNumber = i + 1; + + page.drawText(`Page ${pageNumber} of ${totalPages}`, { + x: width / 2 - 30, + y: 15, + size: 8, + font: helvetica, + color: rgb(0.5, 0.5, 0.5), + }); + } + + const pdfBuffer = Buffer.from(await mergedPdf.save()); + const bundleDocId = `bundle-${grant.id}-${Date.now()}`; const watermarked = await this.ndaPdfService.watermarkExistingPdf( pdfBuffer, @@ -1798,4 +2056,161 @@ export class TrustAccessService { return { name: 'All Policies', downloadUrl }; } -} \ No newline at end of file + + /** + * Convert a policy name to a safe filename + * "Security Updates" -> "security_updates" + */ + private toSafeFilename(name: string): string { + const safeName = name + .toLowerCase() + .replace(/[^a-z0-9\s-]/g, '') // Remove special characters + .replace(/\s+/g, '_') // Replace spaces with underscores + .replace(/-+/g, '_') // Replace hyphens with underscores + .replace(/_+/g, '_') // Collapse multiple underscores + .replace(/^_|_$/g, ''); // Remove leading/trailing underscores + + // Fallback for non-ASCII only names + return safeName || 'policy'; + } + + async downloadAllPoliciesAsZipByAccessToken(token: string) { + const grant = await this.validateAccessToken(token); + + const policies = await db.policy.findMany({ + where: { + organizationId: grant.accessRequest.organizationId, + status: 'published', + isArchived: false, + }, + select: { + id: true, + name: true, + content: true, + pdfUrl: true, + }, + orderBy: [{ lastPublishedAt: 'desc' }, { updatedAt: 'desc' }], + }); + + if (policies.length === 0) { + throw new NotFoundException('No published policies available'); + } + + const 
organizationName = + grant.accessRequest.organization.name || 'Organization'; + + // Create ZIP archive + const archive = archiver('zip', { zlib: { level: 6 } }); + const passThrough = new PassThrough(); + + archive.on('error', (err) => { + passThrough.destroy(err); + }); + + archive.pipe(passThrough); + + // Track filenames to avoid duplicates (case-insensitive) + const usedNamesLower = new Set<string>(); + + const getUniqueFilename = (baseName: string): string => { + const filename = this.toSafeFilename(baseName); + let counter = 1; + let finalName = filename; + + while (usedNamesLower.has(finalName.toLowerCase())) { + finalName = `${filename}_${counter}`; + counter++; + } + + usedNamesLower.add(finalName.toLowerCase()); + return `${finalName}.pdf`; + }; + + // Process policies sequentially + for (const policy of policies) { + const hasUploadedPdf = policy.pdfUrl && policy.pdfUrl.trim() !== ''; + let policyPdfBuffer: Buffer; + + if (hasUploadedPdf) { + try { + const rawBuffer = await this.attachmentsService.getObjectBuffer( + policy.pdfUrl!, + ); + policyPdfBuffer = Buffer.from(rawBuffer); + } catch (error) { + console.warn( + `Failed to fetch uploaded PDF for policy ${policy.id}, falling back to content rendering:`, + error, + ); + policyPdfBuffer = this.pdfRendererService.renderPoliciesPdfBuffer( + [{ name: policy.name, content: policy.content }], + undefined, + grant.accessRequest.organization.primaryColor, + ); + } + } else { + policyPdfBuffer = this.pdfRendererService.renderPoliciesPdfBuffer( + [{ name: policy.name, content: policy.content }], + undefined, + grant.accessRequest.organization.primaryColor, + ); + } + + // Watermark the PDF + const docId = `policy-${policy.id}-${Date.now()}`; + const watermarkedPdf = await this.ndaPdfService.watermarkExistingPdf( + policyPdfBuffer, + { + name: grant.accessRequest.name, + email: grant.subjectEmail, + docId, + }, + ); + + // Add to archive + const filename = getUniqueFilename(policy.name); + 
archive.append(watermarkedPdf, { name: filename }); + } + + // Collect ZIP buffer - set up listeners BEFORE finalize to avoid deadlock + const zipBufferPromise = new Promise((resolve, reject) => { + const chunks: Buffer[] = []; + passThrough.on('data', (chunk) => chunks.push(Buffer.from(chunk))); + passThrough.on('end', () => resolve(Buffer.concat(chunks))); + passThrough.on('error', reject); + }); + + // Finalize the archive + await archive.finalize(); + + // Wait for buffer to be collected + const zipBuffer = await zipBufferPromise; + + // Upload to S3 using attachmentsService (avoids streaming issues) + const safeOrgName = this.toSafeFilename(organizationName); + const dateStr = new Date().toISOString().split('T')[0]; + const downloadFilename = `${safeOrgName}_policies_${dateStr}.zip`; + + const zipKey = await this.attachmentsService.uploadToS3( + zipBuffer, + downloadFilename, + 'application/zip', + grant.accessRequest.organizationId, + 'trust_policy_downloads', + `${grant.id}`, + ); + + // Generate download URL with proper filename + const downloadUrl = + await this.attachmentsService.getPresignedDownloadUrlWithFilename( + zipKey, + downloadFilename, + ); + + return { + name: `${organizationName} - All Policies (ZIP)`, + downloadUrl, + policyCount: policies.length, + }; + } +} diff --git a/apps/api/src/vendors/dto/trigger-vendor-risk-assessment.dto.ts b/apps/api/src/vendors/dto/trigger-vendor-risk-assessment.dto.ts index 75bb3837e..4b705bf1e 100644 --- a/apps/api/src/vendors/dto/trigger-vendor-risk-assessment.dto.ts +++ b/apps/api/src/vendors/dto/trigger-vendor-risk-assessment.dto.ts @@ -34,7 +34,8 @@ export class TriggerVendorRiskAssessmentBatchDto { organizationId: string; @ApiProperty({ - description: 'If false, skips Firecrawl research (cheaper). Defaults to true.', + description: + 'If false, skips Firecrawl research (cheaper). 
Defaults to true.', required: false, default: true, }) @@ -51,5 +52,3 @@ export class TriggerVendorRiskAssessmentBatchDto { @Type(() => TriggerVendorRiskAssessmentVendorDto) vendors: TriggerVendorRiskAssessmentVendorDto[]; } - - diff --git a/apps/api/src/vendors/internal-vendor-automation.controller.ts b/apps/api/src/vendors/internal-vendor-automation.controller.ts index dcfb8b787..7c08817f3 100644 --- a/apps/api/src/vendors/internal-vendor-automation.controller.ts +++ b/apps/api/src/vendors/internal-vendor-automation.controller.ts @@ -18,18 +18,22 @@ export class InternalVendorAutomationController { @Post('risk-assessment/trigger-batch') @HttpCode(200) @ApiOperation({ - summary: 'Trigger vendor risk assessment tasks for a batch of vendors (internal)', + summary: + 'Trigger vendor risk assessment tasks for a batch of vendors (internal)', }) @ApiResponse({ status: 200, description: 'Tasks triggered' }) async triggerVendorRiskAssessmentBatch( @Body() body: TriggerVendorRiskAssessmentBatchDto, ) { // Log incoming request for debugging - console.log('[InternalVendorAutomationController] Received batch trigger request', { - organizationId: body.organizationId, - vendorCount: body.vendors.length, - withResearch: body.withResearch, - }); + console.log( + '[InternalVendorAutomationController] Received batch trigger request', + { + organizationId: body.organizationId, + vendorCount: body.vendors.length, + withResearch: body.withResearch, + }, + ); const result = await this.vendorsService.triggerVendorRiskAssessments({ organizationId: body.organizationId, @@ -38,7 +42,10 @@ export class InternalVendorAutomationController { vendors: body.vendors, }); - console.log('[InternalVendorAutomationController] Batch trigger completed', result); + console.log( + '[InternalVendorAutomationController] Batch trigger completed', + result, + ); return { success: true, @@ -46,5 +53,3 @@ export class InternalVendorAutomationController { }; } } - - diff --git 
a/apps/api/src/vendors/vendors.service.ts b/apps/api/src/vendors/vendors.service.ts index 10e277eb2..ea2d8c71e 100644 --- a/apps/api/src/vendors/vendors.service.ts +++ b/apps/api/src/vendors/vendors.service.ts @@ -7,7 +7,9 @@ import { Prisma } from '@prisma/client'; import type { TriggerVendorRiskAssessmentVendorDto } from './dto/trigger-vendor-risk-assessment.dto'; import { resolveTaskCreatorAndAssignee } from '../trigger/vendor/vendor-risk-assessment/assignee'; -const normalizeWebsite = (website: string | null | undefined): string | null => { +const normalizeWebsite = ( + website: string | null | undefined, +): string | null => { if (!website) return null; const trimmed = website.trim(); if (!trimmed) return null; @@ -40,7 +42,9 @@ const extractDomain = (website: string | null | undefined): string | null => { try { // Add protocol if missing to make URL parsing work - const urlString = /^https?:\/\//i.test(trimmed) ? trimmed : `https://${trimmed}`; + const urlString = /^https?:\/\//i.test(trimmed) + ? trimmed + : `https://${trimmed}`; const url = new URL(urlString); // Remove www. prefix and return just the domain return url.hostname.toLowerCase().replace(/^www\./, ''); @@ -126,10 +130,7 @@ export class VendorsService { riskAssessmentVersion: true, riskAssessmentUpdatedAt: true, }, - orderBy: [ - { riskAssessmentUpdatedAt: 'desc' }, - { createdAt: 'desc' }, - ], + orderBy: [{ riskAssessmentUpdatedAt: 'desc' }, { createdAt: 'desc' }], }); // Prefer record WITH risk assessment data (most recent) @@ -144,7 +145,8 @@ export class VendorsService { ...vendor, riskAssessmentData: globalVendorData?.riskAssessmentData ?? null, riskAssessmentVersion: globalVendorData?.riskAssessmentVersion ?? null, - riskAssessmentUpdatedAt: globalVendorData?.riskAssessmentUpdatedAt ?? null, + riskAssessmentUpdatedAt: + globalVendorData?.riskAssessmentUpdatedAt ?? 
null, }; this.logger.log(`Retrieved vendor: ${vendor.name} (${id})`); @@ -233,12 +235,21 @@ export class VendorsService { vendor: v, domain: extractDomain(v.vendorWebsite ?? null), })) - .filter((vd): vd is { vendor: TriggerVendorRiskAssessmentVendorDto; domain: string } => vd.domain !== null); + .filter( + ( + vd, + ): vd is { + vendor: TriggerVendorRiskAssessmentVendorDto; + domain: string; + } => vd.domain !== null, + ); // Check which domains already have risk assessment data using contains filter const existingDomains = new Set(); if (vendorDomains.length > 0) { - const uniqueDomains = Array.from(new Set(vendorDomains.map((vd) => vd.domain))); + const uniqueDomains = Array.from( + new Set(vendorDomains.map((vd) => vd.domain)), + ); const existing = await db.globalVendors.findMany({ where: { OR: uniqueDomains.map((domain) => ({ @@ -278,10 +289,11 @@ export class VendorsService { if (skippedVendors.length > 0) { const settled = await Promise.allSettled( skippedVendors.map(async (v) => { - const { creatorMemberId, assigneeMemberId } = await resolveTaskCreatorAndAssignee({ - organizationId, - createdByUserId: null, - }); + const { creatorMemberId, assigneeMemberId } = + await resolveTaskCreatorAndAssignee({ + organizationId, + createdByUserId: null, + }); const creatorMember = await db.member.findUnique({ where: { id: creatorMemberId }, @@ -303,7 +315,8 @@ export class VendorsService { const created = await db.taskItem.create({ data: { title: VERIFY_RISK_ASSESSMENT_TASK_TITLE, - description: 'Review the latest Risk Assessment and confirm it is accurate.', + description: + 'Review the latest Risk Assessment and confirm it is accurate.', status: TaskItemStatus.todo, priority: TaskItemPriority.high, entityId: v.vendorId, @@ -351,7 +364,8 @@ export class VendorsService { where: { id: existingVerifyTask.id }, data: { status: TaskItemStatus.todo, - description: 'Review the latest Risk Assessment and confirm it is accurate.', + description: + 'Review the latest Risk 
Assessment and confirm it is accurate.', assigneeId: assigneeMemberId, updatedById: creatorMemberId, }, @@ -363,33 +377,46 @@ export class VendorsService { const failures = settled.filter((r) => r.status === 'rejected'); if (failures.length > 0) { - this.logger.warn('Some verify tasks could not be ensured for skipped vendors', { - organizationId, - failures: failures.length, - skippedCount: skippedVendors.length, - }); + this.logger.warn( + 'Some verify tasks could not be ensured for skipped vendors', + { + organizationId, + failures: failures.length, + skippedCount: skippedVendors.length, + }, + ); } } } // Simplified logging: clear lists of what needs research vs what doesn't if (!withResearch && skippedVendors.length > 0) { - this.logger.log('✅ Vendors that DO NOT need research (already have data)', { - count: skippedVendors.length, - vendors: skippedVendors.map((v) => `${v.vendorName} (${v.vendorWebsite ?? 'no website'})`), - }); + this.logger.log( + '✅ Vendors that DO NOT need research (already have data)', + { + count: skippedVendors.length, + vendors: skippedVendors.map( + (v) => `${v.vendorName} (${v.vendorWebsite ?? 'no website'})`, + ), + }, + ); } if (vendorsToTrigger.length > 0) { this.logger.log('🔍 Vendors that NEED research (missing data)', { count: vendorsToTrigger.length, withResearch, - vendors: vendorsToTrigger.map((v) => `${v.vendorName} (${v.vendorWebsite ?? 'no website'})`), + vendors: vendorsToTrigger.map( + (v) => `${v.vendorName} (${v.vendorWebsite ?? 
'no website'})`, + ), }); } else { - this.logger.log('✅ All vendors already have risk assessment data - no research needed', { - totalVendors: vendors.length, - }); + this.logger.log( + '✅ All vendors already have risk assessment data - no research needed', + { + totalVendors: vendors.length, + }, + ); } // Use batchTrigger for efficiency (less overhead than N individual triggers) @@ -412,7 +439,10 @@ export class VendorsService { return { triggered: 0, batchId: null }; } - const batchHandle = await tasks.batchTrigger('vendor-risk-assessment-task', batch); + const batchHandle = await tasks.batchTrigger( + 'vendor-risk-assessment-task', + batch, + ); this.logger.log('✅ Triggered risk assessment tasks', { count: vendorsToTrigger.length, @@ -424,12 +454,15 @@ export class VendorsService { batchId: batchHandle.batchId, }; } catch (error) { - this.logger.error('Failed to batch trigger vendor risk assessment tasks', { - organizationId, - vendorCount: vendorsToTrigger.length, - error: error instanceof Error ? error.message : String(error), - errorStack: error instanceof Error ? error.stack : undefined, - }); + this.logger.error( + 'Failed to batch trigger vendor risk assessment tasks', + { + organizationId, + vendorCount: vendorsToTrigger.length, + error: error instanceof Error ? error.message : String(error), + errorStack: error instanceof Error ? 
error.stack : undefined, + }, + ); throw error; } } diff --git a/apps/app/.env.example b/apps/app/.env.example index 83449eb52..be2acd8c1 100644 --- a/apps/app/.env.example +++ b/apps/app/.env.example @@ -38,6 +38,7 @@ APP_AWS_REGION="" # Required, for task attachments APP_AWS_ACCESS_KEY_ID="" # Required, for task attachments APP_AWS_SECRET_ACCESS_KEY="" # Required, for task attachments APP_AWS_ORG_ASSETS_BUCKET="" # Required, for org compliance and logo +APP_AWS_ENDPOINT="" # optional for using services like MinIO # TRIGGER REVAL REVALIDATION_SECRET="" # Revalidate server side, generate something random diff --git a/apps/app/package.json b/apps/app/package.json index 6e64cba08..34882d05c 100644 --- a/apps/app/package.json +++ b/apps/app/package.json @@ -56,7 +56,7 @@ "@tiptap/extension-table-row": "^3.4.4", "@trigger.dev/react-hooks": "4.0.6", "@trigger.dev/sdk": "4.0.6", - "@trycompai/db": "^1.3.20", + "@trycompai/db": "1.3.21", "@trycompai/email": "workspace:*", "@types/canvas-confetti": "^1.9.0", "@types/react-syntax-highlighter": "^15.5.13", diff --git a/apps/app/src/actions/organization/lib/initialize-organization.ts b/apps/app/src/actions/organization/lib/initialize-organization.ts index 6eff295be..ddea1607e 100644 --- a/apps/app/src/actions/organization/lib/initialize-organization.ts +++ b/apps/app/src/actions/organization/lib/initialize-organization.ts @@ -233,6 +233,7 @@ export const _upsertOrgFrameworkStructureCore = async ({ data: taskTemplatesForCreation.map((taskTemplate) => ({ title: taskTemplate.name, description: taskTemplate.description, + automationStatus: taskTemplate.automationStatus, organizationId: organizationId, taskTemplateId: taskTemplate.id, })), diff --git a/apps/app/src/actions/tasks/create-task-action.ts b/apps/app/src/actions/tasks/create-task-action.ts index db77e88e9..17bfd235f 100644 --- a/apps/app/src/actions/tasks/create-task-action.ts +++ b/apps/app/src/actions/tasks/create-task-action.ts @@ -42,6 +42,18 @@ export const 
createTaskAction = authActionClient } try { + // Get automation status from template if one is selected + let automationStatus: 'AUTOMATED' | 'MANUAL' = 'AUTOMATED'; + if (taskTemplateId) { + const template = await db.frameworkEditorTaskTemplate.findUnique({ + where: { id: taskTemplateId }, + select: { automationStatus: true }, + }); + if (template) { + automationStatus = template.automationStatus; + } + } + const task = await db.task.create({ data: { title, @@ -52,6 +64,7 @@ export const createTaskAction = authActionClient order: 0, frequency: frequency || null, department: department || null, + automationStatus, taskTemplateId: taskTemplateId || null, ...(controlIds && controlIds.length > 0 && { diff --git a/apps/app/src/actions/tasks/regenerate-task-action.ts b/apps/app/src/actions/tasks/regenerate-task-action.ts index afce4aa54..b00c6edf5 100644 --- a/apps/app/src/actions/tasks/regenerate-task-action.ts +++ b/apps/app/src/actions/tasks/regenerate-task-action.ts @@ -46,12 +46,13 @@ export const regenerateTaskAction = authActionClient throw new Error('Task has no associated template to regenerate from'); } - // Update the task with the template's current title and description + // Update the task with the template's current title, description, and automationStatus await db.task.update({ where: { id: taskId }, data: { title: task.taskTemplate.name, description: task.taskTemplate.description, + automationStatus: task.taskTemplate.automationStatus, }, }); diff --git a/apps/app/src/app/(app)/[orgId]/policies/[policyId]/actions/delete-policy-pdf.ts b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/actions/delete-policy-pdf.ts new file mode 100644 index 000000000..c23562160 --- /dev/null +++ b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/actions/delete-policy-pdf.ts @@ -0,0 +1,79 @@ +'use server'; + +import { authActionClient } from '@/actions/safe-action'; +import { BUCKET_NAME, s3Client } from '@/app/s3'; +import { DeleteObjectCommand } from 
'@aws-sdk/client-s3'; +import { db, PolicyDisplayFormat } from '@db'; +import { revalidatePath } from 'next/cache'; +import { headers } from 'next/headers'; +import { z } from 'zod'; + +const deletePolicyPdfSchema = z.object({ + policyId: z.string(), +}); + +export const deletePolicyPdfAction = authActionClient + .inputSchema(deletePolicyPdfSchema) + .metadata({ + name: 'delete-policy-pdf', + track: { + event: 'delete-policy-pdf-s3', + channel: 'server', + }, + }) + .action(async ({ parsedInput, ctx }) => { + const { policyId } = parsedInput; + const { session } = ctx; + const organizationId = session.activeOrganizationId; + + if (!organizationId) { + return { success: false, error: 'Not authorized' }; + } + + try { + // Get the policy to find the pdfUrl + const policy = await db.policy.findUnique({ + where: { id: policyId, organizationId }, + select: { pdfUrl: true }, + }); + + if (!policy) { + return { success: false, error: 'Policy not found' }; + } + + const oldPdfUrl = policy.pdfUrl; + + // Update policy first to remove pdfUrl and switch back to EDITOR format + await db.policy.update({ + where: { id: policyId, organizationId }, + data: { + pdfUrl: null, + displayFormat: PolicyDisplayFormat.EDITOR, + }, + }); + + // Delete from S3 after database is updated + if (oldPdfUrl && s3Client && BUCKET_NAME) { + try { + const deleteCommand = new DeleteObjectCommand({ + Bucket: BUCKET_NAME, + Key: oldPdfUrl, + }); + await s3Client.send(deleteCommand); + } catch (error) { + // Log error but we've already updated the database successfully + console.error('Error deleting PDF from S3 (orphaned file):', error); + } + } + + const headersList = await headers(); + let path = headersList.get('x-pathname') || headersList.get('referer') || ''; + path = path.replace(/\/[a-z]{2}\//, '/'); + revalidatePath(path); + + return { success: true }; + } catch (error) { + console.error('Error deleting policy PDF:', error); + return { success: false, error: 'Failed to delete PDF.' 
}; + } + }); diff --git a/apps/app/src/app/(app)/[orgId]/policies/[policyId]/actions/upload-policy-pdf.ts b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/actions/upload-policy-pdf.ts index eb9c1c6bf..fba7e961b 100644 --- a/apps/app/src/app/(app)/[orgId]/policies/[policyId]/actions/upload-policy-pdf.ts +++ b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/actions/upload-policy-pdf.ts @@ -2,7 +2,7 @@ import { authActionClient } from '@/actions/safe-action'; import { BUCKET_NAME, s3Client } from '@/app/s3'; -import { PutObjectCommand } from '@aws-sdk/client-s3'; +import { DeleteObjectCommand, PutObjectCommand } from '@aws-sdk/client-s3'; import { db, PolicyDisplayFormat } from '@db'; import { revalidatePath } from 'next/cache'; import { headers } from 'next/headers'; @@ -41,17 +41,26 @@ export const uploadPolicyPdfAction = authActionClient const s3Key = `${organizationId}/policies/${policyId}/${Date.now()}-${sanitizedFileName}`; try { + // 1. Get the existing policy to check for an old PDF + const existingPolicy = await db.policy.findUnique({ + where: { id: policyId, organizationId }, + select: { pdfUrl: true }, + }); + + const oldPdfUrl = existingPolicy?.pdfUrl; + + // 2. Upload the new file to S3 const fileBuffer = Buffer.from(fileData, 'base64'); - const command = new PutObjectCommand({ + const putCommand = new PutObjectCommand({ Bucket: BUCKET_NAME, Key: s3Key, Body: fileBuffer, ContentType: fileType, }); - await s3Client.send(command); + await s3Client.send(putCommand); - // After a successful upload, update the policy to store the S3 Key + // 3. Update the database to point to the new S3 key await db.policy.update({ where: { id: policyId, organizationId }, data: { @@ -60,6 +69,20 @@ export const uploadPolicyPdfAction = authActionClient }, }); + // 4. 
Delete the old PDF from S3 (cleanup) + if (oldPdfUrl && oldPdfUrl !== s3Key && s3Client && BUCKET_NAME) { + try { + const deleteCommand = new DeleteObjectCommand({ + Bucket: BUCKET_NAME, + Key: oldPdfUrl, + }); + await s3Client.send(deleteCommand); + } catch (error) { + // Log cleanup error but the main task (uploading new) was successful + console.error('Error cleaning up old policy PDF from S3:', error); + } + } + const headersList = await headers(); let path = headersList.get('x-pathname') || headersList.get('referer') || ''; path = path.replace(/\/[a-z]{2}\//, '/'); diff --git a/apps/app/src/app/(app)/[orgId]/policies/[policyId]/components/PdfViewer.tsx b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/components/PdfViewer.tsx index 8ee67c6de..5a126f779 100644 --- a/apps/app/src/app/(app)/[orgId]/policies/[policyId]/components/PdfViewer.tsx +++ b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/components/PdfViewer.tsx @@ -1,15 +1,34 @@ 'use client'; +import { Button } from '@comp/ui/button'; import { Card, CardContent, CardHeader, CardTitle } from '@comp/ui/card'; import { cn } from '@comp/ui/cn'; -import { ExternalLink, FileText, Loader2 } from 'lucide-react'; +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, + AlertDialogTrigger, +} from '@comp/ui/alert-dialog'; +import { + DropdownMenu, + DropdownMenuContent, + DropdownMenuItem, + DropdownMenuTrigger, +} from '@comp/ui/dropdown-menu'; +import { ExternalLink, FileText, Loader2, MoreVertical, Trash2, Upload } from 'lucide-react'; import { useAction } from 'next-safe-action/hooks'; import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; +import { useEffect, useRef, useState } from 'react'; import Dropzone from 'react-dropzone'; import { toast } from 'sonner'; import { getPolicyPdfUrlAction } from '../actions/get-policy-pdf-url'; import { 
uploadPolicyPdfAction } from '../actions/upload-policy-pdf'; +import { deletePolicyPdfAction } from '../actions/delete-policy-pdf'; interface PdfViewerProps { policyId: string; @@ -22,6 +41,7 @@ export function PdfViewer({ policyId, pdfUrl, isPendingApproval }: PdfViewerProp const [files, setFiles] = useState([]); const [signedUrl, setSignedUrl] = useState(null); const [isUrlLoading, setUrlLoading] = useState(true); + const fileInputRef = useRef(null); const { execute: getUrl } = useAction(getPolicyPdfUrlAction, { onSuccess: (result) => { @@ -54,6 +74,35 @@ export function PdfViewer({ policyId, pdfUrl, isPendingApproval }: PdfViewerProp onError: (error) => toast.error(error.error.serverError || 'Failed to upload PDF.'), }); + const { execute: deletePdf, status: deleteStatus } = useAction(deletePolicyPdfAction, { + onSuccess: () => { + toast.success('PDF deleted successfully.'); + setSignedUrl(null); + router.refresh(); + }, + onError: (error) => toast.error(error.error.serverError || 'Failed to delete PDF.'), + }); + + const handleReplaceClick = () => { + fileInputRef.current?.click(); + }; + + const handleFileInputChange = (e: React.ChangeEvent) => { + const selectedFiles = e.target.files; + if (selectedFiles && selectedFiles.length > 0) { + const file = selectedFiles[0]; + if (file.size > 100 * 1024 * 1024) { + toast.error('File size must be less than 100MB'); + return; + } + handleUpload([file]); + } + // Reset input so the same file can be selected again + if (fileInputRef.current) { + fileInputRef.current.value = ''; + } + }; + // Handle file upload from FileUploader component const handleUpload = async (uploadFiles: File[]) => { if (!uploadFiles.length) return; @@ -95,25 +144,113 @@ export function PdfViewer({ policyId, pdfUrl, isPendingApproval }: PdfViewerProp }; const isUploading = uploadStatus === 'executing'; + const isDeleting = deleteStatus === 'executing'; + + const fileName = pdfUrl?.split('/').pop() || ''; + const MAX_FILENAME_LENGTH = 50; + const 
truncatedFileName = + fileName.length > MAX_FILENAME_LENGTH + ? `${fileName.substring(0, MAX_FILENAME_LENGTH)}...` + : fileName; return ( - - {signedUrl ? ( - - {pdfUrl?.split('/').pop()} - - - ) : ( - pdfUrl?.split('/').pop() +
+ + {signedUrl ? ( + + {truncatedFileName} + + + ) : ( + + {truncatedFileName} + + )} + + {pdfUrl && !isPendingApproval && ( + <> + + + + + + + + + Replace + + + + e.preventDefault()} + disabled={isUploading || isDeleting} + className="text-destructive focus:text-destructive" + > + + Delete + + + + + Delete PDF? + + Are you sure you want to delete this PDF? This action cannot be undone. + The policy will switch back to Editor View. + + + + Cancel + deletePdf({ policyId })} + className="bg-destructive text-destructive-foreground hover:bg-destructive/90" + > + {isDeleting ? ( + <> + + Deleting... + + ) : ( + 'Delete' + )} + + + + + + + )} - +
{pdfUrl ? ( @@ -150,7 +287,7 @@ export function PdfViewer({ policyId, pdfUrl, isPendingApproval }: PdfViewerProp maxSize={100 * 1024 * 1024} maxFiles={1} multiple={false} - disabled={isUploading} + disabled={isUploading || isDeleting} > {({ getRootProps, getInputProps, isDragActive }) => (

{isUploading ? 'Uploading new PDF...' - : isDragActive - ? 'Drop your new PDF here to replace the current one' - : 'Drag and drop a new PDF here to replace the current one, or click to browse (up to 100MB)'} + : isDeleting + ? 'Deleting PDF...' + : isDragActive + ? 'Drop your new PDF here to replace the current one' + : 'Drag and drop a new PDF here to replace the current one, or click to browse (up to 100MB)'}

)} @@ -183,7 +322,7 @@ export function PdfViewer({ policyId, pdfUrl, isPendingApproval }: PdfViewerProp maxSize={100 * 1024 * 1024} maxFiles={1} multiple={false} - disabled={isUploading} + disabled={isUploading || isDeleting} > {({ getRootProps, getInputProps, isDragActive }) => (
diff --git a/apps/app/src/app/(app)/[orgId]/policies/[policyId]/editor/components/PolicyDetails.tsx b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/editor/components/PolicyDetails.tsx index ab03416ac..6eb7a568a 100644 --- a/apps/app/src/app/(app)/[orgId]/policies/[policyId]/editor/components/PolicyDetails.tsx +++ b/apps/app/src/app/(app)/[orgId]/policies/[policyId]/editor/components/PolicyDetails.tsx @@ -95,6 +95,8 @@ export function PolicyContentManager({ }: PolicyContentManagerProps) { const [showAiAssistant, setShowAiAssistant] = useState(aiAssistantEnabled); const [editorKey, setEditorKey] = useState(0); + const [activeTab, setActiveTab] = useState(displayFormat); + const previousTabRef = useRef(displayFormat); const [currentContent, setCurrentContent] = useState>(() => { const formattedContent = Array.isArray(policyContent) ? policyContent @@ -152,7 +154,19 @@ export function PolicyContentManager({ ); const switchFormat = useAction(switchPolicyDisplayFormatAction, { - onError: () => toast.error('Failed to switch view.'), + onSuccess: () => { + // Server action succeeded, update ref for next operation + previousTabRef.current = activeTab; + }, + onError: () => { + toast.error('Failed to switch view.'); + // Roll back to the previous tab state on error + setActiveTab(previousTabRef.current); + // Also restore AI assistant visibility if we were switching from EDITOR + if (previousTabRef.current === 'EDITOR' && aiAssistantEnabled) { + setShowAiAssistant(true); + } + }, }); const currentPolicyMarkdown = useMemo( @@ -194,9 +208,19 @@ export function PolicyContentManager({
- switchFormat.execute({ policyId, format: format as 'EDITOR' | 'PDF' }) - } + value={activeTab} + onValueChange={(format) => { + // Store current tab as previous before changing (using ref to avoid stale closure) + previousTabRef.current = activeTab; + // Optimistically update UI + setActiveTab(format); + // Hide AI assistant when switching to PDF view + if (format === 'PDF') { + setShowAiAssistant(false); + } + // Execute server action - onError will roll back if it fails + switchFormat.execute({ policyId, format: format as 'EDITOR' | 'PDF' }); + }} className="w-full" >
@@ -208,7 +232,7 @@ export function PolicyContentManager({ PDF View - {!isPendingApproval && aiAssistantEnabled && ( + {!isPendingApproval && aiAssistantEnabled && activeTab === 'EDITOR' && (
- {aiAssistantEnabled && showAiAssistant && ( + {aiAssistantEnabled && showAiAssistant && activeTab === 'EDITOR' && (
{ + const handleDownloadAll = async () => { setIsDownloadingAll(true); - // Fetch logs for all policies - const fetchAllLogs = async () => { - const logsEntries = await Promise.all( - data.map(async (policy) => { - const logs = await getLogsForPolicy(policy.id); - return [policy.id, logs] as const; - }), - ); - // Convert array of entries to an object - return Object.fromEntries(logsEntries); - }; - - // Since handleDownloadAll is not async, we need to handle the async logic here - fetchAllLogs().then((policyLogs) => { + try { + const response = await apiClient.get<{ + downloadUrl: string; + policyCount: number; + name: string; + }>('/v1/policies/download-all', orgId); + + if (response.error) { + toast.error(response.error); + return; + } + + if (response.data?.downloadUrl) { + // Open the download URL in a new tab + window.open(response.data.downloadUrl, '_blank'); + toast.success(`Downloaded ${response.data.policyCount} policies`); + } + } catch { + toast.error('Failed to download policies'); + } finally { setIsDownloadingAll(false); - downloadAllPolicies(data, policyLogs); - }); + } }; const getRowProps = React.useCallback( diff --git a/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/components/SingleTask.tsx b/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/components/SingleTask.tsx index 67f2a669b..012a8f288 100644 --- a/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/components/SingleTask.tsx +++ b/apps/app/src/app/(app)/[orgId]/tasks/[taskId]/components/SingleTask.tsx @@ -243,8 +243,8 @@ export function SingleTask({ Regenerate Task - This will update the task title and description with the latest content from the - framework template. The current content will be replaced. Continue? + This will update the task title, description, and automation status with the latest + content from the framework template. The current content will be replaced. Continue? 
diff --git a/apps/app/src/app/s3.ts b/apps/app/src/app/s3.ts index 1c4d38544..7b7bcc9f8 100644 --- a/apps/app/src/app/s3.ts +++ b/apps/app/src/app/s3.ts @@ -3,6 +3,7 @@ import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3'; const APP_AWS_REGION = process.env.APP_AWS_REGION; const APP_AWS_ACCESS_KEY_ID = process.env.APP_AWS_ACCESS_KEY_ID; const APP_AWS_SECRET_ACCESS_KEY = process.env.APP_AWS_SECRET_ACCESS_KEY; +const APP_AWS_ENDPOINT = process.env.APP_AWS_ENDPOINT; export const BUCKET_NAME = process.env.APP_AWS_BUCKET_NAME; export const APP_AWS_QUESTIONNAIRE_UPLOAD_BUCKET = process.env.APP_AWS_QUESTIONNAIRE_UPLOAD_BUCKET; @@ -18,11 +19,13 @@ try { } s3ClientInstance = new S3Client({ + endpoint: APP_AWS_ENDPOINT || undefined, region: APP_AWS_REGION, credentials: { accessKeyId: APP_AWS_ACCESS_KEY_ID, secretAccessKey: APP_AWS_SECRET_ACCESS_KEY, }, + forcePathStyle: !!APP_AWS_ENDPOINT, }); } catch (error) { console.error('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'); diff --git a/apps/portal/.env.example b/apps/portal/.env.example index 824addeee..1c537c3e1 100644 --- a/apps/portal/.env.example +++ b/apps/portal/.env.example @@ -15,6 +15,7 @@ APP_AWS_ACCESS_KEY_ID="" # AWS Access Key ID APP_AWS_SECRET_ACCESS_KEY="" # AWS Secret Access Key APP_AWS_REGION="" # AWS Region APP_AWS_BUCKET_NAME="" # AWS Bucket Name +APP_AWS_ENDPOINT="" # optional for using services like MinIO # Microsoft sign-in AUTH_MICROSOFT_CLIENT_ID= diff --git a/apps/portal/package.json b/apps/portal/package.json index e1c0c641c..e2e677493 100644 --- a/apps/portal/package.json +++ b/apps/portal/package.json @@ -10,7 +10,7 @@ "@react-email/render": "^1.1.2", "@t3-oss/env-nextjs": "^0.13.8", "@trycompai/analytics": "workspace:*", - "@trycompai/db": "1.3.20", + "@trycompai/db": "1.3.21", "@trycompai/email": "workspace:*", "@trycompai/kv": "workspace:*", "@trycompai/ui": "workspace:*", diff --git a/apps/portal/src/utils/s3.ts b/apps/portal/src/utils/s3.ts index 85ba857d2..52f32752c 
100644 --- a/apps/portal/src/utils/s3.ts +++ b/apps/portal/src/utils/s3.ts @@ -5,6 +5,7 @@ import { getSignedUrl } from '@aws-sdk/s3-request-presigner'; const APP_AWS_REGION = process.env.APP_AWS_REGION; const APP_AWS_ACCESS_KEY_ID = process.env.APP_AWS_ACCESS_KEY_ID; const APP_AWS_SECRET_ACCESS_KEY = process.env.APP_AWS_SECRET_ACCESS_KEY; +const APP_AWS_ENDPOINT = process.env.APP_AWS_ENDPOINT; export const BUCKET_NAME = process.env.APP_AWS_BUCKET_NAME; @@ -23,11 +24,13 @@ if (!APP_AWS_ACCESS_KEY_ID || !APP_AWS_SECRET_ACCESS_KEY || !BUCKET_NAME || !APP // Create a single S3 client instance // Add null checks or assertions if the checks above don't guarantee non-null values export const s3Client = new S3Client({ + endpoint: APP_AWS_ENDPOINT || undefined, region: APP_AWS_REGION!, credentials: { accessKeyId: APP_AWS_ACCESS_KEY_ID!, secretAccessKey: APP_AWS_SECRET_ACCESS_KEY!, }, + forcePathStyle: !!APP_AWS_ENDPOINT, }); // Ensure BUCKET_NAME is exported and non-null checked if needed elsewhere explicitly diff --git a/bun.lock b/bun.lock index 157ac190b..95fb996d3 100644 --- a/bun.lock +++ b/bun.lock @@ -210,7 +210,7 @@ "@tiptap/extension-table-row": "^3.4.4", "@trigger.dev/react-hooks": "4.0.6", "@trigger.dev/sdk": "4.0.6", - "@trycompai/db": "^1.3.20", + "@trycompai/db": "1.3.21", "@trycompai/email": "workspace:*", "@types/canvas-confetti": "^1.9.0", "@types/react-syntax-highlighter": "^15.5.13", @@ -370,7 +370,7 @@ }, "packages/db": { "name": "@trycompai/db", - "version": "1.3.20", + "version": "1.3.21", "bin": { "comp-prisma-postinstall": "./dist/postinstall.js", }, diff --git a/packages/db/package.json b/packages/db/package.json index 9d75a674e..756e01425 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -1,7 +1,7 @@ { "name": "@trycompai/db", "description": "Database package with Prisma client and schema for Comp AI", - "version": "1.3.20", + "version": "1.3.21", "dependencies": { "@prisma/client": "^6.13.0", "dotenv": "^16.4.5", diff --git 
a/packages/db/prisma/migrations/20260113191630_add_task_automation_status/migration.sql b/packages/db/prisma/migrations/20260113191630_add_task_automation_status/migration.sql new file mode 100644 index 000000000..57f04a7e6 --- /dev/null +++ b/packages/db/prisma/migrations/20260113191630_add_task_automation_status/migration.sql @@ -0,0 +1,5 @@ +-- CreateEnum +CREATE TYPE "TaskAutomationStatus" AS ENUM ('AUTOMATED', 'MANUAL'); + +-- AlterTable +ALTER TABLE "Task" ADD COLUMN "automationStatus" "TaskAutomationStatus" NOT NULL DEFAULT 'AUTOMATED'; diff --git a/packages/db/prisma/migrations/20260113203400_add_task_template_automation_status/migration.sql b/packages/db/prisma/migrations/20260113203400_add_task_template_automation_status/migration.sql new file mode 100644 index 000000000..a9a183e23 --- /dev/null +++ b/packages/db/prisma/migrations/20260113203400_add_task_template_automation_status/migration.sql @@ -0,0 +1,2 @@ +-- AlterTable +ALTER TABLE "FrameworkEditorTaskTemplate" ADD COLUMN "automationStatus" "TaskAutomationStatus" NOT NULL DEFAULT 'AUTOMATED'; diff --git a/packages/db/prisma/schema/framework-editor.prisma b/packages/db/prisma/schema/framework-editor.prisma index 9f37d1535..3d3fc288c 100644 --- a/packages/db/prisma/schema/framework-editor.prisma +++ b/packages/db/prisma/schema/framework-editor.prisma @@ -64,11 +64,12 @@ model FrameworkEditorPolicyTemplate { } model FrameworkEditorTaskTemplate { - id String @id @default(dbgenerated("generate_prefixed_cuid('frk_tt'::text)")) - name String - description String - frequency Frequency // Using the enum from shared.prisma - department Departments // Using the enum from shared.prisma + id String @id @default(dbgenerated("generate_prefixed_cuid('frk_tt'::text)")) + name String + description String + frequency Frequency // Using the enum from shared.prisma + department Departments // Using the enum from shared.prisma + automationStatus TaskAutomationStatus @default(AUTOMATED) controlTemplates 
FrameworkEditorControlTemplate[] diff --git a/packages/db/prisma/schema/task.prisma b/packages/db/prisma/schema/task.prisma index 52c0ec835..ba33a2e4b 100644 --- a/packages/db/prisma/schema/task.prisma +++ b/packages/db/prisma/schema/task.prisma @@ -3,8 +3,9 @@ model Task { id String @id @default(dbgenerated("generate_prefixed_cuid('tsk'::text)")) title String description String - status TaskStatus @default(todo) - frequency TaskFrequency? + status TaskStatus @default(todo) + automationStatus TaskAutomationStatus @default(AUTOMATED) + frequency TaskFrequency? department Departments? @default(none) order Int @default(0) @@ -46,3 +47,8 @@ enum TaskFrequency { quarterly yearly } + +enum TaskAutomationStatus { + AUTOMATED + MANUAL +} diff --git a/packages/db/prisma/seed/frameworkEditorSchemas.ts b/packages/db/prisma/seed/frameworkEditorSchemas.ts index 607424d1a..80b470c89 100644 --- a/packages/db/prisma/seed/frameworkEditorSchemas.ts +++ b/packages/db/prisma/seed/frameworkEditorSchemas.ts @@ -121,6 +121,7 @@ export const FrameworkEditorTaskTemplateSchema = z.object({ description: z.string(), frequency: z.string(), // Placeholder for Frequency enum department: z.string(), // Placeholder for Departments enum + automationStatus: z.enum(['AUTOMATED', 'MANUAL']).optional(), // @default(AUTOMATED) // controlTemplates: FrameworkEditorControlTemplate[] - relational, omitted createdAt: z .preprocess( diff --git a/packages/docs/openapi.json b/packages/docs/openapi.json index 10445dcff..a933b7f3f 100644 --- a/packages/docs/openapi.json +++ b/packages/docs/openapi.json @@ -5353,6 +5353,40 @@ ] } }, + "/v1/policies/download-all": { + "get": { + "description": "Generates a PDF bundle containing all published policies with organization branding and returns a signed download URL", + "operationId": "PoliciesController_downloadAllPolicies_v1", + "parameters": [ + { + "name": "X-Organization-Id", + "in": "header", + "description": "Organization ID (required for session auth, optional 
for API key auth)", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Signed URL for PDF bundle returned" + }, + "404": { + "description": "No published policies found" + } + }, + "security": [ + { + "apikey": [] + } + ], + "summary": "Download all published policies as a single PDF", + "tags": [ + "Policies" + ] + } + }, "/v1/policies/{id}": { "get": { "description": "Returns a specific policy by ID for the authenticated organization. Supports both API key authentication (X-API-Key header) and session authentication (cookies + X-Organization-Id header).", @@ -5764,6 +5798,66 @@ ] } }, + "/v1/attachments/{attachmentId}/download": { + "get": { + "description": "Generate a fresh signed URL for downloading any attachment", + "operationId": "AttachmentsController_getAttachmentDownloadUrl_v1", + "parameters": [ + { + "name": "X-Organization-Id", + "in": "header", + "description": "Organization ID (required for session auth, optional for API key auth)", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "attachmentId", + "required": true, + "in": "path", + "description": "Unique attachment identifier", + "schema": { + "example": "att_abc123def456", + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Download URL generated successfully", + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "downloadUrl": { + "type": "string", + "description": "Signed URL for downloading the file", + "example": "https://bucket.s3.amazonaws.com/path/to/file.pdf?signature=..." 
+ }, + "expiresIn": { + "type": "number", + "description": "URL expiration time in seconds", + "example": 900 + } + } + } + } + } + } + }, + "security": [ + { + "apikey": [] + } + ], + "summary": "Get attachment download URL", + "tags": [ + "Attachments" + ] + } + }, "/v1/device-agent/mac": { "get": { "description": "Downloads the Comp AI Device Agent installer for macOS as a DMG file. The agent helps monitor device compliance and security policies. Supports both API key authentication (X-API-Key header) and session authentication (cookies + X-Organization-Id header).", @@ -5876,66 +5970,6 @@ ] } }, - "/v1/attachments/{attachmentId}/download": { - "get": { - "description": "Generate a fresh signed URL for downloading any attachment", - "operationId": "AttachmentsController_getAttachmentDownloadUrl_v1", - "parameters": [ - { - "name": "X-Organization-Id", - "in": "header", - "description": "Organization ID (required for session auth, optional for API key auth)", - "required": false, - "schema": { - "type": "string" - } - }, - { - "name": "attachmentId", - "required": true, - "in": "path", - "description": "Unique attachment identifier", - "schema": { - "example": "att_abc123def456", - "type": "string" - } - } - ], - "responses": { - "200": { - "description": "Download URL generated successfully", - "content": { - "application/json": { - "schema": { - "type": "object", - "properties": { - "downloadUrl": { - "type": "string", - "description": "Signed URL for downloading the file", - "example": "https://bucket.s3.amazonaws.com/path/to/file.pdf?signature=..." 
- }, - "expiresIn": { - "type": "number", - "description": "URL expiration time in seconds", - "example": 900 - } - } - } - } - } - } - }, - "security": [ - { - "apikey": [] - } - ], - "summary": "Get attachment download URL", - "tags": [ - "Attachments" - ] - } - }, "/v1/tasks": { "get": { "description": "Retrieve all tasks for the authenticated organization", @@ -8519,6 +8553,31 @@ ] } }, + "/v1/trust-access/access/{token}/policies/download-all-zip": { + "get": { + "description": "Generate ZIP archive containing individual watermarked PDFs for each policy", + "operationId": "TrustAccessController_downloadAllPoliciesAsZip_v1", + "parameters": [ + { + "name": "token", + "required": true, + "in": "path", + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Download URL for ZIP archive returned" + } + }, + "summary": "Download all policies as ZIP with individual PDFs", + "tags": [ + "Trust Access" + ] + } + }, "/v1/trust-access/access/{token}/compliance-resources": { "get": { "description": "Get list of uploaded compliance certificates for the organization", diff --git a/packages/docs/self-hosting/env-reference.mdx b/packages/docs/self-hosting/env-reference.mdx index e8ec998e9..5da63aa08 100644 --- a/packages/docs/self-hosting/env-reference.mdx +++ b/packages/docs/self-hosting/env-reference.mdx @@ -75,6 +75,7 @@ These variables are required for a functional Docker deployment: | `APP_AWS_ORG_ASSETS_BUCKET` | app | runtime | conditional | Organization logos, compliance certs | | `APP_AWS_QUESTIONNAIRE_UPLOAD_BUCKET` | app | runtime | conditional | Security questionnaire uploads | | `APP_AWS_KNOWLEDGE_BASE_BUCKET` | app | runtime | conditional | Knowledge base documents | +| `APP_AWS_ENDPOINT` | app, portal | runtime | optional | URL for alternative S3-compatible providers | AWS S3 variables are required for file upload features (attachments, logos, questionnaires). Without them, these features will fail.