diff --git a/.cursor/plans/static-registry.md b/.cursor/plans/static-registry.md deleted file mode 100644 index 3c26a355..00000000 --- a/.cursor/plans/static-registry.md +++ /dev/null @@ -1,295 +0,0 @@ -This plan will give you a clear, step-by-step guide to building the static component registry within the existing "apps/docs" project. - ---- - -### **High-Level Plan: Phase 1 - Static Registry** - -The goal is to create a robust API for static components that is fully compatible with the `shadcn-cli` and can be tested thoroughly. - -### **1. The Data Layer: Defining the "Source of Truth"** - -This is the most critical part. A well-defined data structure will make the rest of the implementation smooth. - -#### **A. Directory Structure** - -The directory structure remains the same, providing a clean organization for your templates. - -``` -src/ -└── registry/ - ├── lib/ - │ ├── types.ts // NEW: Centralized type definitions - │ ├── validator.ts // Build-time validation script - │ └── utils.ts // File system and data transformation logic - └── templates/ - ├── button/ - │ ├── _meta.ts - │ └── button.tsx - └── icon/ - ├── _meta.ts - └── index.ts -``` - -#### **B. Type Definitions (`types.ts`)** - -Create a central file for your internal data types. This ensures consistency and provides excellent developer experience with TypeScript. 
- -```typescript -// src/registry/lib/types.ts -import { z } from "zod"; - -// Defines a single file within a template -export const templateFileSchema = z.object({ - sourceFileName: z.string(), - destinationPath: z.string(), -}); - -// Defines the metadata for a single template (_meta.ts) -export const templateMetadataSchema = z.object({ - name: z.string(), - type: z.literal("static"), // For Phase 1, we only allow 'static' - description: z.string(), - categories: z.array(z.enum(["component", "page", "utility", "hook"])), - files: z.array(templateFileSchema), -}); - -export type TemplateFile = z.infer; -export type TemplateMetadata = z.infer; -``` - -#### **C. Example Metadata (`_meta.ts`)** - -Here is how you would define a `button` component using the new types. - -```typescript -// src/registry/templates/button/_meta.ts -import type { TemplateMetadata } from "@/registry/lib/types"; - -export const meta: TemplateMetadata = { - name: "button", - type: "static", - description: "Displays a button or a link.", - categories: ["component"], - files: [ - { - // The name of the file within this directory - sourceFileName: "button.tsx", - // The path where the file will be placed in the user's project - destinationPath: "src/components/ui/button.tsx", - }, - ], -}; -``` - -### **2. The API Layer: Building the Registry with Next.js & Hono** - -This layer reads from your data source and exposes it in the Shadcn-compatible format. - -#### **A. API Route Handler (`route.ts`)** - -The Hono router remains the core of the API, providing flexibility for the future. 
- -```typescript -// src/app/api/registry/[...slug]/route.ts -import { Hono } from "hono"; -import { handle } from "hono/vercel"; -import { getRegistryIndex, getStaticComponent } from "@/registry/lib/utils"; - -export const runtime = "edge"; - -const app = new Hono().basePath("/api/registry"); - -// Serves the index of all available components -app.get("/index.json", async (c) => { - try { - const index = await getRegistryIndex(); - return c.json(index); - } catch (error) { - return c.json({ error: "Failed to fetch registry index." }, 500); - } -}); - -// Serves the data for a single component -// The :style param is part of the shadcn spec, we'll include it for compatibility -app.get("/:style/:name.json", async (c) => { - const { name } = c.req.param(); - try { - const component = await getStaticComponent(name); - if (!component) { - return c.json({ error: "Component not found." }, 404); - } - return c.json(component); - } catch (error) { - return c.json({ error: "Failed to fetch component." }, 500); - } -}); - -export const GET = handle(app); -``` - -#### **B. Registry Utilities (`utils.ts`)** - -These functions are updated to handle the new `sourceFileName` and `destinationPath` structure. 
- -```typescript -// src/registry/lib/utils.ts -import fs from "fs/promises"; -import path from "path"; -import type { TemplateMetadata } from "./types"; - -const templatesPath = path.join(process.cwd(), "src/registry/templates"); - -// Builds the index.json file -export async function getRegistryIndex() { - const componentDirs = await fs.readdir(templatesPath, { - withFileTypes: true, - }); - const index = []; - - for (const dir of componentDirs) { - if (dir.isDirectory()) { - const { meta }: { meta: TemplateMetadata } = await import( - `@/registry/templates/${dir.name}/_meta` - ); - index.push({ - name: meta.name, - type: meta.type, - categories: meta.categories, - files: meta.files.map((f) => f.destinationPath), // shadcn index uses the destination paths - }); - } - } - return index; -} - -// Builds the JSON for a single static component -export async function getStaticComponent(name: string) { - const { meta }: { meta: TemplateMetadata } = await import( - `@/registry/templates/${name}/_meta` - ); - - const componentFiles = await Promise.all( - meta.files.map(async (file) => { - const contentPath = path.join(templatesPath, name, file.sourceFileName); - const content = await fs.readFile(contentPath, "utf-8"); - return { - // The `name` key in the output should be the filename part of the destination - name: path.basename(file.destinationPath), - path: file.destinationPath, - content: content, // The critical content key - }; - }), - ); - - return { - name: meta.name, - type: meta.type, - files: componentFiles, - }; -} -``` - -#### **C. Build-Time Validation (`validator.ts`)** - -This script is crucial for preventing regressions. It should be run as part of your CI/CD pipeline or build process. 
- -```typescript -// src/registry/lib/validator.ts -import fs from "fs/promises"; -import path from "path"; -import { templateMetadataSchema } from "./types"; - -const templatesPath = path.join(process.cwd(), "src/registry/templates"); - -async function validateRegistry() { - console.log("🔍 Validating registry templates..."); - const componentDirs = await fs.readdir(templatesPath, { - withFileTypes: true, - }); - let errorCount = 0; - - for (const dir of componentDirs) { - if (dir.isDirectory()) { - const metaPath = path.join(templatesPath, dir.name, "_meta.ts"); - const { meta } = await import(metaPath); - - // 1. Validate metadata against Zod schema - const validationResult = templateMetadataSchema.safeParse(meta); - if (!validationResult.success) { - console.error(`❌ Invalid metadata in ${dir.name}/_meta.ts:`); - console.error(validationResult.error.flatten()); - errorCount++; - } - - // 2. Validate that all source files exist - for (const file of meta.files) { - const sourcePath = path.join( - templatesPath, - dir.name, - file.sourceFileName, - ); - try { - await fs.access(sourcePath); - } catch { - console.error( - `❌ Missing source file: ${file.sourceFileName} referenced in ${dir.name}/_meta.ts`, - ); - errorCount++; - } - } - } - } - - if (errorCount > 0) { - console.error(`\nValidation failed with ${errorCount} error(s).`); - process.exit(1); // Fail the build - } else { - console.log("✅ Registry validation successful!"); - } -} - -validateRegistry(); -``` - -To run this, add a script to your `package.json`: - -```json -{ - "scripts": { - "build": "npm run registry:validate && next build", - "registry:validate": "node src/registry/lib/validator.ts" - } -} -``` - -### **3. Testing with Vitest** - -Your tests should confirm that the API output adheres to the Shadcn spec. 
- -```typescript -// src/app/api/registry/route.test.ts -import { describe, it, expect, vi } from "vitest"; -// You will need to mock the `utils.ts` functions to test the API routes in isolation. - -vi.mock("@/registry/lib/utils", () => ({ - getRegistryIndex: vi.fn(), - getStaticComponent: vi.fn(), -})); - -describe("Registry API - Phase 1", () => { - it("GET /api/registry/index.json should return a valid index", async () => { - // Mock the return value of getRegistryIndex - // Make a request to the endpoint - // Assert that the response contains `name`, `type`, `categories`, and `files` (as an array of strings). - }); - - it("GET /api/registry/default/button.json should return a valid component", async () => { - // Mock the return value of getStaticComponent - // Make a request to the endpoint - // Assert that the top-level response has `name`, `type`, and `files`. - // Assert that each object in the `files` array has `name`, `path`, and `content`. - }); -}); -``` - -This detailed plan for Phase 1 provides a robust, testable, and scalable foundation. By focusing on data integrity and API compatibility first, you set yourself up for success when implementing dynamic components and authentication later. 
diff --git a/packages/fmodata/BATCH_ERROR_FIX.md b/packages/fmodata/BATCH_ERROR_FIX.md deleted file mode 100644 index c078fd61..00000000 --- a/packages/fmodata/BATCH_ERROR_FIX.md +++ /dev/null @@ -1,124 +0,0 @@ -# Batch Operation Error Handling Fix - -## Problem - -When batch operations encountered an error (e.g., querying a non-existent table), the error message was vague and unhelpful: - -``` -Error [ResponseStructureError]: Invalid response structure: expected 'value' property to be an array - -{ - timestamp: 2025-12-05T22:53:53.218Z, - kind: 'ResponseStructureError', - expected: "'value' property to be an array", - received: undefined -} -``` - -This error appeared to be a validation error, but it was actually masking the real FileMaker OData error response that contained useful information like: -- Error code: `-1020` -- Error message: `Table 'Purchase_Orders' not defined in database` - -## Root Cause - -The `processResponse` methods in all builder classes (QueryBuilder, InsertBuilder, UpdateBuilder, DeleteBuilder, RecordBuilder) were not checking for HTTP error responses before attempting to parse the response body as data. - -In batch operations, when FileMaker returns a 404 error with an error JSON body, the builders would try to validate it as a data response, leading to the vague `ResponseStructureError`. - -## Solution - -### 1. Created Shared Error Parser (`src/client/error-parser.ts`) - -Created a new helper function `parseErrorResponse` that: -- Checks for JSON error responses from FileMaker -- Extracts the OData error structure (`{ error: { code, message } }`) -- Returns appropriate error objects: - - `ODataError` for standard OData errors - - `SchemaLockedError` for code 303 errors - - `HTTPError` as fallback - -### 2. 
Updated All Builder `processResponse` Methods - -Added error checking at the start of each `processResponse` method: - -```typescript -async processResponse(response: Response, options?: ExecuteOptions) { - // Check for error responses (important for batch operations) - if (!response.ok) { - const error = await parseErrorResponse( - response, - response.url || `/${this.databaseName}/${this.tableName}`, - ); - return { data: undefined, error }; - } - - // ... rest of response processing -} -``` - -Updated files: -- `src/client/query-builder.ts` -- `src/client/insert-builder.ts` -- `src/client/update-builder.ts` -- `src/client/delete-builder.ts` -- `src/client/record-builder.ts` - -## Result - -### Before -``` -Error [ResponseStructureError]: Invalid response structure: expected 'value' property to be an array -kind: 'ResponseStructureError' -expected: "'value' property to be an array" -received: undefined -``` - -### After -``` -Error [ODataError]: OData error: Table 'Purchase_Orders' not defined in database -kind: 'ODataError' -code: '-1020' -message: "OData error: Table 'Purchase_Orders' not defined in database" -details: { code: '-1020', message: "Table 'Purchase_Orders' not defined in database" } -``` - -## Testing - -### Unit Tests -- ✅ All existing tests pass (622 tests) -- ✅ Enhanced `tests/batch.test.ts` with error detail assertions -- ✅ Added `tests/batch-error-messages.test.ts` demonstrating the fix - -### Example Usage - -```typescript -const result = await db - .batch([punchlistQuery, purchaseOrdersQuery, ticketsQuery]) - .execute(); - -const [r1, r2, r3] = result.results; - -// Check if query failed -if (r2.error) { - if (isODataError(r2.error)) { - console.log(`Error Code: ${r2.error.code}`); // -1020 - console.log(`Error Message: ${r2.error.message}`); // Table not defined - console.log(`HTTP Status: ${r2.status}`); // 404 - } -} -``` - -## Benefits - -1. **Better Developer Experience**: Error messages now contain actionable information -2. 
**Easier Debugging**: Can identify the exact issue (table name typo, missing table, etc.) -3. **Consistent Error Handling**: Batch operations now return the same error types as single operations -4. **Type Safety**: Can use type guards (`isODataError`, `isHTTPError`) to handle specific error types - -## Backward Compatibility - -✅ **Fully backward compatible** -- Existing error handling code continues to work -- Added functionality doesn't break existing APIs -- All 622 existing tests pass without modification - diff --git a/packages/fmodata/IMPLEMENTATION_SUMMARY.md b/packages/fmodata/IMPLEMENTATION_SUMMARY.md new file mode 100644 index 00000000..79798795 --- /dev/null +++ b/packages/fmodata/IMPLEMENTATION_SUMMARY.md @@ -0,0 +1,275 @@ +# ORM API Implementation Summary + +## Overview + +Successfully implemented a Drizzle-inspired ORM API for fmodata that provides enhanced type safety and developer experience while maintaining full compatibility with the existing API. + +## Completed Features + +### ✅ 1. Field Builder System (`src/orm/field-builders.ts`) + +Created a fluent field builder API with: + +- **Field Types**: `textField()`, `numberField()`, `dateField()`, `timeField()`, `timestampField()`, `containerField()`, `calcField()` +- **Chainable Methods**: + - `.primaryKey()` - Mark as primary key (auto read-only) + - `.notNull()` - Make non-nullable + - `.readOnly()` - Exclude from insert/update + - `.entityId(id)` - Assign FileMaker field ID + - `.readValidator(validator)` - Transform/validate data when reading from database + - `.writeValidator(validator)` - Transform/validate data when writing to database + +### ✅ 2. Column Reference System (`src/orm/column.ts`) + +Created `Column` class that: + +- Carries type information for TypeScript inference +- Stores field name, entity ID, table name, and table entity ID +- Provides methods to get identifiers (field/table) +- Supports both field names and entity IDs +- Includes `isColumn()` type guard + +### ✅ 3. 
Filter Operators (`src/orm/operators.ts`) + +Implemented comprehensive operator functions: + +**Comparison**: `eq()`, `ne()`, `gt()`, `gte()`, `lt()`, `lte()` +**String**: `contains()`, `startsWith()`, `endsWith()` +**Array**: `inArray()`, `notInArray()` +**Null**: `isNull()`, `isNotNull()` +**Logical**: `and()`, `or()`, `not()` +**OrderBy**: `asc()`, `desc()` - Create OrderByExpression for type-safe sorting + +Features: + +- Support column-to-value comparisons +- Support column-to-column comparisons (cross-table) +- Convert to OData filter syntax +- Handle entity ID transformation +- Proper SQL escaping (single quotes) + +### ✅ 4. Table Occurrence Factory (`src/orm/table.ts`) + +Created `fmTableOccurrence()` function that: + +- Takes field builders as input +- Generates Zod schema automatically (output and input schemas) +- Creates Column references for each field +- Extracts metadata (primary key, required, read-only, entity IDs) +- Supports `navigationPaths` for runtime validation of expand/navigate operations +- Supports `defaultSelect` option ("all", "schema", or function) for automatic field selection +- Returns object with both metadata (via Symbols) and column accessors + +### ✅ 5. 
Query Builder Updates (`src/client/query-builder.ts`) + +Enhanced QueryBuilder to support: + +**Select Method**: + +- Accepts object with Column references for type-safe field selection +- `.select({ id: users.id, name: users.name })` ✓ +- Supports field renaming: `.select({ userId: users.id, userName: users.name })` ✓ +- String-based select still supported via legacy API + +**Where Method**: + +- New `.where()` method accepts FilterExpression +- Converts operator expressions to OData syntax +- Respects `useEntityIds` setting + +**OrderBy Method**: + +- Accepts Column references, OrderByExpression, or strings +- `.orderBy(users.name)` ✓ (single column, ascending by default) +- `.orderBy([users.name, "asc"])` ✓ (single column with direction) +- `.orderBy(asc(users.name), desc(users.age))` ✓ (variadic with helpers) +- `.orderBy([[users.name, "asc"], [users.createdAt, "desc"]])` ✓ (array syntax) + +### ✅ 6. Navigation Validation (`src/client/builders/expand-builder.ts`, `src/client/entity-set.ts`, `src/client/record-builder.ts`) + +Added runtime validation for navigation operations: + +- Validates `expand()` operations using `getNavigationPaths()` helper +- Validates `navigate()` operations in EntitySet and RecordBuilder +- Checks if relation name is in table's `navigationPaths` array +- Throws descriptive error if invalid path is attempted +- Works with new ORM table occurrences +- Backward compatible with old API + +### ✅ 7. Default Select Feature (`src/client/entity-set.ts`, `src/client/builders/default-select.ts`) + +Implemented automatic field selection based on table configuration: + +- `defaultSelect: "all"` - Select all fields (default behavior) +- `defaultSelect: "schema"` - Select only fields defined in schema +- `defaultSelect: (columns) => {...}` - Custom function to select specific columns +- Automatically applied in `list()` and `get()` if no explicit `select()` is called + +### ✅ 8. 
Documentation + +Created comprehensive documentation: + +- **`docs/ORM_API.md`**: Complete API guide with examples +- **`scripts/dreams.ts`**: Updated with working examples +- **`tests/orm-api.test.ts`**: Test suite covering all features + +### ✅ 9. Exports (`src/index.ts`, `src/orm/index.ts`) + +Updated exports to include: + +- All field builder functions +- Column and operator types/functions +- fmTableOccurrence function +- Proper TypeScript types + +## Key Design Decisions + +### 1. Query Order: `from().select().where()` + +Kept the existing pattern (not Drizzle's `select().from()`) for consistency and single-table query ergonomics. + +### 2. Select Syntax + +Support both string-based and column-based selection: + +- String-based (legacy): `select("id", "name")` - variadic string arguments +- Column-based (new ORM): `select({ id: users.id, name: users.name })` - object with column references, supports field renaming + +### 3. Navigation Validation + +Simple `navigationPaths: string[]` array with runtime validation when expanding/navigating. Uses `getNavigationPaths()` helper to access paths from FMTable. Throws descriptive error if relation name is not in paths. + +### 4. Cross-Table Operations + +Operators support column-to-column comparisons: `eq(users.id, contacts.id_user)` + +### 5. Default Select + +Tables can define `defaultSelect` option to automatically select fields when `list()` or `get()` is called without explicit `select()`. Supports "all", "schema", or custom function. + +### 6. Backward Compatibility + +New API coexists with old API. Both exported from main package. No breaking changes. 
+ +## File Structure + +``` +src/ +├── orm/ +│ ├── field-builders.ts # Field builder classes and factories +│ ├── column.ts # Column reference type +│ ├── operators.ts # Filter and OrderBy operator functions +│ ├── table.ts # fmTableOccurrence function and FMTable class +│ └── index.ts # Barrel exports +├── client/ +│ ├── query/ +│ │ └── query-builder.ts # Enhanced with Column/operator support +│ ├── builders/ +│ │ ├── expand-builder.ts # Expand logic with navigation validation +│ │ └── default-select.ts # Default select helper functions +│ ├── entity-set.ts # EntitySet with defaultSelect support +│ └── ... # Other existing files +└── index.ts # Main exports (old + new API) + +docs/ +└── ORM_API.md # Complete API documentation + +scripts/ +└── dreams.ts # Updated with working examples + +tests/ +└── orm-api.test.ts # Test suite for new API +``` + +## Usage Example + +```typescript +import { + fmTableOccurrence, + textField, + numberField, + timestampField, + eq, + and, + gt, + asc, + FMServerConnection, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +// Define table with field builders +const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + age: numberField().entityId("FMFID:3"), + status: textField() + .readValidator(z.enum(["active", "pending", "inactive"])) + .entityId("FMFID:4"), + createdAt: timestampField().readOnly().entityId("FMFID:5"), + }, + { + entityId: "FMTID:100", + navigationPaths: ["contacts"], + }, +); + +// Connect +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "key" }, +}); +const db = connection.database("MyDB.fmp12"); + +// Query with new API +const result = await db + .from(users) + .list() + .select({ + id: users.id, + name: users.name, + age: users.age, + }) + .where(and(eq(users.status, "active"), gt(users.age, 18))) + .orderBy(asc(users.name)) + .execute(); +``` + +## Type 
Safety Benefits + +1. **Enum Autocomplete**: `eq(users.status, "active")` - "active" autocompletes from enum validator +2. **Column Type Checking**: Operators validate value types against column types +3. **Select Field Validation**: Column references provide type-safe field selection with renaming support +4. **Cross-Table Safety**: Column references carry table information for validation +5. **Navigation Validation**: Runtime checks ensure valid expand/navigate paths +6. **Insert/Update Type Safety**: Read-only fields automatically excluded, required fields enforced +7. **Input/Output Transformation**: Separate validators for reading (readValidator) and writing (writeValidator) + +## Implementation Status + +✅ All core features completed: + +1. ✅ Field builders with read/write validators +2. ✅ Column references with type safety +3. ✅ Filter operators (comparison, string, array, null, logical) +4. ✅ OrderBy operators (asc, desc) +5. ✅ Table factory (fmTableOccurrence) with Symbol-based metadata +6. ✅ Query builder updates (select, where, orderBy) +7. ✅ Navigation validation (expand, navigate) +8. ✅ Default select feature + +✅ No linting errors +✅ Documentation complete +✅ Tests written +✅ Examples updated + +## Next Steps (Optional) + +Potential future enhancements: + +1. Add more operator types (between, like with wildcards, etc.) +2. Support for aggregate functions (count, sum, avg, etc.) +3. Type-safe joins (if OData supports them) +4. Schema migration helpers +5. 
Code generation from FileMaker metadata diff --git a/packages/fmodata/README.md b/packages/fmodata/README.md index cbffee5a..13950147 100644 --- a/packages/fmodata/README.md +++ b/packages/fmodata/README.md @@ -26,8 +26,10 @@ Here's a minimal example to get you started: ```typescript import { FMServerConnection, - defineBaseTable, - defineTableOccurrence, + fmTableOccurrence, + textField, + numberField, + eq, } from "@proofkit/fmodata"; import { z } from "zod/v4"; @@ -44,30 +46,21 @@ const connection = new FMServerConnection({ }, }); -// 2. Define your table schema -const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), - active: z.boolean(), - }, - idField: "id", -}); - -// 3. Create a table occurrence -const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, +// 2. Define your table schema using field builders +const users = fmTableOccurrence("users", { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: numberField() + .readValidator(z.coerce.boolean()) + .writeValidator(z.boolean().transform((v) => (v ? 1 : 0))), }); -// 4. Create a database instance -const db = connection.database("MyDatabase.fmp12", { - occurrences: [usersTO], -}); +// 3. Create a database instance +const db = connection.database("MyDatabase.fmp12"); -// 5. Query your data -const { data, error } = await db.from("users").list().execute(); +// 4. Query your data +const { data, error } = await db.from(users).list().execute(); if (error) { console.error(error); @@ -86,8 +79,7 @@ This library relies heavily on the builder pattern for defining your queries and As such, there are layers to the library to help you build your queries and operations. 
- `FMServerConnection` - hold server connection details and authentication -- `BaseTable` - defines the fields and validators for a base table -- `TableOccurrence` - references a base table, and other table occurrences for navigation +- `FMTable` (created via `fmTableOccurrence()`) - defines the fields, validators, and metadata for a table occurrence - `Database` - connects the table occurrences to the server connection ### FileMaker Server prerequisites To use this library you need: A note on best practices: OData relies entirely on the table occurances in the relationship graph for data access. Relationships between table occurrences are also used, but maybe not as you expect (in short, only the simplest relationships are supported). Given these constraints, it may be best for you to have a seperate FileMaker file for your OData connection, using external data sources to link to your actual data. We've found this especially helpful for larger projects that have very large graphs with lots of duplicated table occurances compared to actual base tables. +OData relies entirely on the table occurrences in the relationship graph for data access. Relationships between table occurrences are also used, but maybe not as you expect (in short, only the simplest relationships are supported). Given these constraints, it may be best for you to have a separate FileMaker file for your OData connection, using external data sources to link to your actual data file. We've found this especially helpful for larger projects that have very large graphs with lots of redundant table occurrences compared to the actual number of base tables. ### Server Connection ### Schema Definitions -This library relies on a schema-first approach for good type-safety and optional runtime validation. These are abstracted into BaseTable and TableOccurrence types to match FileMaker concepts. 
+This library relies on a schema-first approach for good type-safety and optional runtime validation. Use **`fmTableOccurrence()`** with field builders to create your schemas. This provides full TypeScript type inference for field names in queries. + +#### Field Builders + +Field builders provide a fluent API for defining table fields with type-safe metadata. These field types map directly to the FileMaker field types + +- `textField()` +- `numberField()` +- `dateField()` +- `timeField()` +- `timestampField()` +- `containerField()` +- `calcField()` -Use **`defineBaseTable()`** and **`defineTableOccurrence()`** to create your schemas. These functions provide full TypeScript type inference for field names in queries. +Each field builder supports chainable methods: -A `BaseTable` defines the schema for your FileMaker table using Standard Schema. These examples show zod, but you can use any other validation library that supports Standard Schema. +- `.primaryKey()` - Mark as primary key (automatically read-only) +- `.notNull()` - Make field non-nullable (required for inserts) +- `.readOnly()` - Exclude from insert/update operations +- `.entityId(id)` - Assign FileMaker field ID (FMFID), allowing your API calls to survive FileMaker name changes +- `.readValidator(validator)` - Transform/validate data when reading from database +- `.writeValidator(validator)` - Transform/validate data when writing to database + +#### Defining Tables + +Use `fmTableOccurrence()` to define a table with field builders: ```typescript import { z } from "zod/v4"; -import { defineBaseTable } from "@proofkit/fmodata"; - -const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - email: z.string(), - phone: z.string().optional(), - createdAt: z.string(), +import { + fmTableOccurrence, + textField, + numberField, + timestampField, +} from "@proofkit/fmodata"; + +const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), 
+ name: textField().notNull().entityId("FMFID:2"), + email: textField().notNull().entityId("FMFID:3"), + phone: textField().entityId("FMFID:4"), // Optional (nullable by default) + createdAt: timestampField().readOnly().entityId("FMFID:5"), }, - idField: "id", // The primary key field (automatically read-only) - required: ["phone"], // optional: additional required fields for insert (beyond auto-inferred) - readOnly: ["createdAt"], // optional: fields excluded from insert/update -}); + { + entityId: "FMTID:100", // Optional: FileMaker table occurrence ID + defaultSelect: "schema", // Optional: "all", "schema", or function. Defaults to "schema". + navigationPaths: ["users"], // Optional: valid navigation targets to provide type-errors when navigating/expanding + }, +); ``` -A `TableOccurrence` is the actual entry point for the OData service on the FileMaker server. It allows you to reference the same base table multiple times with different names. +The function returns a table object that provides: -```typescript -import { defineTableOccurrence } from "@proofkit/fmodata"; - -const contactsTO = defineTableOccurrence({ - name: "contacts", // The table occurrence name in FileMaker - baseTable: contactsBase, -}); -``` +- Column references for each field (e.g., `contacts.id`, `contacts.name`) +- Type-safe schema for queries and operations +- Metadata stored via Symbols (hidden from IDE autocomplete) #### Default Field Selection -FileMaker will automatically return all non-container fields from a schema if you don't specify a $select parameter in your query. This library forces you to be a bit more explicit about what fields you want to return so that the types will more accurately reflect the full data you will get back. To modify this behavior, change the `defaultSelect` option when creating the `TableOccurrence`. +FileMaker will automatically return all non-container fields from a schema if you don't specify a $select parameter in your query. 
This library allows you to configure default field selection behavior using the `defaultSelect` option: ```typescript -// Option 1 (default): "schema" - Select all fields from the schema (same as "all" but more explicit) -const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - defaultSelect: "schema", // a $select parameter will be always be added to the query for only the fields you've defined in the BaseTable schema -}); +// Option 1 (default): "schema" - Select all fields from the schema +const users = fmTableOccurrence( + "users", + { + /* fields */ + }, + { + defaultSelect: "schema", // A $select parameter will always be added for only the fields defined in the schema + }, +); -// Option 2: "all" - Select all fields (default behavior) -const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - defaultSelect: "all", // Don't always a $select parameter to the query; FileMaker will return all non-container fields from the table -}); +// Option 2: "all" - Select all fields (FileMaker default behavior) +const users = fmTableOccurrence( + "users", + { + /* fields */ + }, + { + defaultSelect: "all", // No $select parameter by default; FileMaker returns all non-container fields + }, +); -// Option 3: Array of field names - Select only specific fields by default -const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - defaultSelect: ["username", "email"], // Only select these fields by default -}); +// Option 3: Function - Select specific columns by default +const users = fmTableOccurrence( + "users", + { + /* fields */ + }, + { + defaultSelect: (cols) => ({ + username: cols.username, + email: cols.email, + }), // Only select these fields by default + }, +); -// When you call list(), the defaultSelect is applied automatically -const result = await db.from("users").list().execute(); -// If defaultSelect is ["username", "email"], result.data will only contain those fields +// When you call 
list() or get(), the defaultSelect is applied automatically +const result = await db.from(users).list().execute(); +// If defaultSelect is a function returning { username, email }, result.data will only contain those fields // You can still override with explicit select() const result = await db - .from("users") + .from(users) .list() - .select("username", "email", "age") // Always overrides at the per-request level + .select({ username: users.username, email: users.email, age: users.age }) // Always overrides at the per-request level .execute(); ``` -Lastly, you can combine all table occurrences into a database instance for the full type-safe experience. This is a method on the main `FMServerConnection` client class. - -```typescript -const db = connection.database("MyDatabase.fmp12", { - occurrences: [contactsTO, usersTO], // Register your table occurrences -}); -``` - ## Querying Data ### Basic Queries @@ -239,9 +262,9 @@ Get a single field value: ```typescript const result = await db - .from("users") + .from(users) .get("user-123") - .getSingleField("email") + .getSingleField(users.email) .execute(); if (result.data) { @@ -251,186 +274,85 @@ if (result.data) { ### Filtering -fmodata provides type-safe filter operations that prevent common errors at compile time. The filter system supports three syntaxes: shorthand, single operator objects, and arrays for multiple operators. - -#### Operator Syntax +fmodata provides type-safe filter operations that prevent common errors at compile time. You can use either the new ORM-style API with operators and column references, or the legacy filter API. -You can use filters in three ways: +#### New ORM-Style API (Recommended) -**1. Shorthand (direct value):** +Use the `where()` method with filter operators and column references for type-safe filtering: ```typescript -.filter({ name: "John" }) -// Equivalent to: { name: [{ eq: "John" }] } -``` - -**2. 
Single operator object:** - -```typescript -.filter({ age: { gt: 18 } }) -``` - -**3. Array of operators (for multiple operators on same field):** +import { eq, gt, and, or, contains } from "@proofkit/fmodata"; -```typescript -.filter({ age: [{ gt: 18 }, { lt: 65 }] }) -// Result: age gt 18 and age lt 65 -``` - -The array pattern prevents duplicate operators on the same field and allows multiple conditions with implicit AND. - -#### Available Operators - -**String fields:** - -- `eq`, `ne` - equality/inequality -- `contains`, `startswith`, `endswith` - string functions -- `gt`, `ge`, `lt`, `le` - comparison -- `in` - match any value in array - -**Number fields:** - -- `eq`, `ne`, `gt`, `ge`, `lt`, `le` - comparisons -- `in` - match any value in array - -**Boolean fields:** - -- `eq`, `ne` - equality only - -**Date fields:** - -- `eq`, `ne`, `gt`, `ge`, `lt`, `le` - date comparisons -- `in` - match any date in array - -#### Shorthand Syntax - -For simple equality checks, use the shorthand: - -```typescript -const result = await db.from("users").list().filter({ name: "John" }).execute(); -// Equivalent to: { name: [{ eq: "John" }] } -``` - -#### Examples - -```typescript -// Equality filter (single operator) -const activeUsers = await db - .from("users") - .list() - .filter({ active: { eq: true } }) - .execute(); - -// Comparison operators (single operator) -const adultUsers = await db - .from("users") - .list() - .filter({ age: { gt: 18 } }) - .execute(); - -// String operators (single operator) -const johns = await db - .from("users") - .list() - .filter({ name: { contains: "John" } }) - .execute(); - -// Multiple operators on same field (array syntax, implicit AND) -const rangeQuery = await db - .from("users") - .list() - .filter({ age: [{ gt: 18 }, { lt: 65 }] }) - .execute(); - -// Combine filters with AND +// Simple equality const result = await db - .from("users") + .from(users) .list() - .filter({ - and: [{ active: [{ eq: true }] }, { age: [{ gt: 18 }] }], - 
}) + .where(eq(users.active, true)) .execute(); -// Combine filters with OR -const result = await db - .from("users") - .list() - .filter({ - or: [{ name: [{ eq: "John" }] }, { name: [{ eq: "Jane" }] }], - }) - .execute(); +// Comparison operators +const result = await db.from(users).list().where(gt(users.age, 18)).execute(); -// IN operator +// String operators const result = await db - .from("users") + .from(users) .list() - .filter({ age: [{ in: [18, 21, 25] }] }) + .where(contains(users.name, "John")) .execute(); -// Null checks +// Combine with AND const result = await db - .from("users") + .from(users) .list() - .filter({ deletedAt: [{ eq: null }] }) + .where(and(eq(users.active, true), gt(users.age, 18))) .execute(); -``` - -#### Logical Operators -Combine multiple conditions with `and`, `or`, `not`: - -```typescript +// Combine with OR const result = await db - .from("users") + .from(users) .list() - .filter({ - and: [{ name: [{ contains: "John" }] }, { age: [{ gt: 18 }] }], - }) + .where(or(eq(users.role, "admin"), eq(users.role, "moderator"))) .execute(); ``` -#### Escape Hatch +Available operators: -For unsupported edge cases, pass a raw OData filter string: - -```typescript -const result = await db - .from("users") - .list() - .filter("substringof('John', name)") - .execute(); -``` +- **Comparison**: `eq()`, `ne()`, `gt()`, `gte()`, `lt()`, `lte()` +- **String**: `contains()`, `startsWith()`, `endsWith()` +- **Array**: `inArray()`, `notInArray()` +- **Null**: `isNull()`, `isNotNull()` +- **Logical**: `and()`, `or()`, `not()` ### Sorting -Sort results using `orderBy()`. The method is fully type-safe for typed databases, providing autocomplete for field names and sort directions. +Sort results using `orderBy()`. The method supports both column references (new ORM API) and string field names (legacy API). 
-#### Single Field +#### Using Column References (New ORM API) ```typescript -// Sort ascending (default direction) -const result = await db.from("users").list().orderBy("name").execute(); +import { asc, desc } from "@proofkit/fmodata"; + +// Single field (ascending by default) +const result = await db.from(users).list().orderBy(users.name).execute(); + +// Single field with explicit direction +const result = await db.from(users).list().orderBy(asc(users.name)).execute(); +const result = await db.from(users).list().orderBy(desc(users.age)).execute(); -// Explicit direction using tuple syntax +// Multiple fields (variadic) const result = await db - .from("users") + .from(users) .list() - .orderBy(["name", "desc"]) + .orderBy(asc(users.lastName), desc(users.firstName)) .execute(); -``` - -#### Multiple Fields - -Use an array of tuples to sort by multiple fields: -```typescript -// Multiple fields with explicit directions +// Multiple fields (array syntax) const result = await db - .from("users") + .from(users) .list() .orderBy([ - ["lastName", "asc"], - ["firstName", "desc"], + [users.lastName, "asc"], + [users.firstName, "desc"], ]) .execute(); ``` @@ -441,32 +363,14 @@ For typed databases, `orderBy()` provides full type safety: ```typescript // ✅ Valid - "name" is a field in the schema -db.from("users").list().orderBy("name"); +db.from(users).list().orderBy(users.name); // ✅ Valid - tuple with field and direction -db.from("users").list().orderBy(["name", "asc"]); - -// ❌ TypeScript Error - "invalid" is not a field -db.from("users").list().orderBy("invalid"); +db.from(users).list().orderBy(asc(users.name)); +db.from(users).list().orderBy(desc(users.name)); -// ❌ TypeScript Error - "name" is not a valid direction -db.from("users").list().orderBy(["email", "name"]); - -// ❌ TypeScript Error - second value must be "asc" or "desc" -db.from("users").list().orderBy(["email", "invalid"]); -``` - -#### Escape Hatch (Untyped Databases) - -For untyped databases (no schema), 
raw strings are still accepted: - -```typescript -const untypedDb = connection.database("MyDB"); // No occurrences -const result = await untypedDb - .from("users") - .list() - .orderBy("name desc") // Raw string accepted - .execute(); +// ✅ Valid - multiple fields +db.from(users).list().orderBy(asc(users.lastName), desc(users.firstName)); ``` ### Pagination @@ -475,24 +379,29 @@ Control the number of records returned and pagination: ```typescript // Limit results -const result = await db.from("users").list().top(10).execute(); +const result = await db.from(users).list().top(10).execute(); // Skip records (pagination) -const result = await db.from("users").list().top(10).skip(20).execute(); +const result = await db.from(users).list().top(10).skip(20).execute(); // Count total records -const result = await db.from("users").list().count().execute(); +const result = await db.from(users).list().count().execute(); ``` ### Selecting Fields -Select specific fields to return: +Select specific fields to return. 
You can use either column references (new ORM API) or string field names (legacy API): ```typescript +// New ORM API: Using column references (type-safe, supports renaming) const result = await db - .from("users") + .from(users) .list() - .select("username", "email") + .select({ + username: users.username, + email: users.email, + userId: users.id, // Renamed from "id" to "userId" + }) .execute(); // result.data[0] will only have username and email fields @@ -504,9 +413,9 @@ Use `single()` to ensure exactly one record is returned (returns an error if zer ```typescript const result = await db - .from("users") + .from(users) .list() - .filter({ email: { eq: "user@example.com" } }) + .where(eq(users.email, "user@example.com")) .single() .execute(); @@ -520,9 +429,9 @@ Use `maybeSingle()` when you want at most one record (returns `null` if no recor ```typescript const result = await db - .from("users") + .from(users) .list() - .filter({ email: { eq: "user@example.com" } }) + .where(eq(users.email, "user@example.com")) .maybeSingle() .execute(); @@ -545,12 +454,17 @@ if (result.data) { All query methods can be chained together: ```typescript +// Using new ORM API const result = await db - .from("users") + .from(users) .list() - .select("username", "email", "age") - .filter({ age: { gt: 18 } }) - .orderBy("username") + .select({ + username: users.username, + email: users.email, + age: users.age, + }) + .where(gt(users.age, 18)) + .orderBy(asc(users.username)) .top(10) .skip(0) .execute(); @@ -565,7 +479,7 @@ Insert new records with type-safe data: ```typescript // Insert a new user const result = await db - .from("users") + .from(users) .insert({ username: "johndoe", email: "john@example.com", @@ -578,30 +492,25 @@ if (result.data) { } ``` -Fields are automatically required for insert if their validator doesn't allow `null` or `undefined`. You can specify additional required fields: +Fields are automatically required for insert if they use `.notNull()`. 
Read-only fields (including primary keys) are automatically excluded: ```typescript -const usersBase = defineBaseTable({ - schema: { - id: z.string(), // Auto-required (not nullable), but excluded from insert (idField) - username: z.string(), // Auto-required (not nullable) - email: z.string(), // Auto-required (not nullable) - phone: z.string().nullable(), // Optional by default - createdAt: z.string(), // Auto-required, but excluded (readOnly) - }, - idField: "id", // Automatically excluded from insert/update - required: ["phone"], // Make phone required for inserts despite being nullable - readOnly: ["createdAt"], // Exclude from insert/update operations +const users = fmTableOccurrence("users", { + id: textField().primaryKey(), // Auto-required, but excluded from insert (primaryKey) + username: textField().notNull(), // Auto-required (notNull) + email: textField().notNull(), // Auto-required (notNull) + phone: textField(), // Optional by default (nullable) + createdAt: timestampField().readOnly(), // Excluded from insert/update }); -// TypeScript enforces: username, email, and phone are required +// TypeScript enforces: username and email are required // TypeScript excludes: id and createdAt cannot be provided const result = await db - .from("users") + .from(users) .insert({ username: "johndoe", email: "john@example.com", - phone: "+1234567890", // Required because specified in 'required' array + phone: "+1234567890", // Optional }) .execute(); ``` @@ -613,7 +522,7 @@ Update records by ID or filter: ```typescript // Update by ID const result = await db - .from("users") + .from(users) .update({ username: "newname" }) .byId("user-123") .execute(); @@ -622,29 +531,27 @@ if (result.data) { console.log(`Updated ${result.data.updatedCount} record(s)`); } -// Update by filter +// Update by filter (using new ORM API) +import { lt, and, eq } from "@proofkit/fmodata"; + const result = await db - .from("users") + .from(users) .update({ active: false }) - .where((q) => 
q.filter({ lastLogin: { lt: "2023-01-01" } }))
+  .where(lt(users.lastLogin, "2023-01-01"))
   .execute();

 // Complex filter example
 const result = await db
-  .from("users")
+  .from(users)
   .update({ active: false })
-  .where((q) =>
-    q.filter({
-      and: [{ active: true }, { count: { lt: 5 } }],
-    }),
-  )
+  .where(and(eq(users.active, true), lt(users.count, 5)))
   .execute();

-// Update with additional query options
+// Update with additional query options (query-builder callback form)
 const result = await db
   .from("users")
   .update({ active: false })
-  .where((q) => q.filter({ active: true }).top(10))
+  .where((q) => q.where(eq(users.active, true)).top(10))
   .execute();
 ```

@@ -654,28 +561,26 @@ Delete records by ID or filter:

 ```typescript
 // Delete by ID
-const result = await db.from("users").delete().byId("user-123").execute();
+const result = await db.from(users).delete().byId("user-123").execute();

 if (result.data) {
   console.log(`Deleted ${result.data.deletedCount} record(s)`);
 }

-// Delete by filter
+// Delete by filter (using new ORM API)
+import { eq, and, lt } from "@proofkit/fmodata";
+
 const result = await db
-  .from("users")
+  .from(users)
   .delete()
-  .where((q) => q.filter({ active: false }))
+  .where(eq(users.active, false))
   .execute();

 // Delete with complex filters
 const result = await db
-  .from("users")
+  .from(users)
   .delete()
-  .where((q) =>
-    q.filter({
-      and: [{ active: false }, { lastLogin: { lt: "2023-01-01" } }],
-    }),
-  )
+  .where(and(eq(users.active, false), lt(users.lastLogin, "2023-01-01")))
   .execute();
 ```

@@ -683,148 +588,145 @@ const result = await db

 ### Defining Navigation

-Use `buildOccurrences()` to define relationships between tables. 
This function takes an array of table occurrences and a configuration object that specifies navigation relationships using type-safe string references: +Define navigation relationships using the `navigationPaths` option when creating table occurrences: ```typescript -import { - defineBaseTable, - defineTableOccurrence, - buildOccurrences, -} from "@proofkit/fmodata"; +import { fmTableOccurrence, textField } from "@proofkit/fmodata"; -const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - userId: z.string(), +const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey(), + name: textField().notNull(), + userId: textField().notNull(), }, - idField: "id", -}); - -const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), + { + navigationPaths: ["users"], // Valid navigation targets }, - idField: "id", -}); - -// Step 1: Define base table occurrences (without navigation) -const _contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, -}); - -const _usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, -}); +); -// Step 2: Build occurrences with navigation using string references -// The strings autocomplete to valid table occurrence names! 
-const occurrences = buildOccurrences({ - occurrences: [_contactsTO, _usersTO], - navigation: { - contacts: ["users"], - users: ["contacts"], +const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), }, -}); + { + navigationPaths: ["contacts"], // Valid navigation targets + }, +); // Use with your database const db = connection.database("MyDB", { - occurrences: occurrences, + occurrences: [contacts, users], }); ``` -The `buildOccurrences` function accepts an object with: - -- `occurrences` - Array of TableOccurrences to build -- `navigation` - Optional object mapping TO names to arrays of navigation targets - -It returns a tuple in the same order as the input array, with full autocomplete for navigation target names. Self-navigation is prevented at the type level. +The `navigationPaths` option: -- Handles circular references automatically -- Returns fully typed `TableOccurrence` instances with resolved navigation +- Specifies which table occurrences can be navigated to from this table +- Enables runtime validation when using `expand()` or `navigate()` +- Throws descriptive errors if you try to navigate to an invalid path ### Navigating Between Tables Navigate to related records: ```typescript -// Navigate from a specific record +// Navigate from a specific record (using column references) const result = await db - .from("contacts") + .from(contacts) .get("contact-123") - .navigate("users") - .select("username", "email") + .navigate(users) + .select({ + username: users.username, + email: users.email, + }) .execute(); // Navigate without specifying a record first -const result = await db.from("contacts").navigate("users").list().execute(); +const result = await db.from(contacts).navigate(users).list().execute(); -// You can navigate to arbitrary tables not in your schema +// Using legacy API with string field names const result = await db - .from("contacts") - 
.navigate("some_other_table") - .list() + .from(contacts) + .get("contact-123") + .navigate(users) + .select({ username: users.username, email: users.email }) .execute(); ``` ### Expanding Related Records -Use `expand()` to include related records in your query results: +Use `expand()` to include related records in your query results. The library validates that the target table is in the source table's `navigationPaths`: ```typescript // Simple expand -const result = await db.from("contacts").list().expand("users").execute(); +const result = await db.from(contacts).list().expand(users).execute(); -// Expand with field selection +// Expand with field selection (using column references) const result = await db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.select("username", "email")) + .expand(users, (b) => + b.select({ + username: users.username, + email: users.email, + }), + ) .execute(); -// Expand with filtering +// Expand with filtering (using new ORM API) +import { eq } from "@proofkit/fmodata"; + const result = await db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.filter({ active: true })) + .expand(users, (b) => b.where(eq(users.active, true))) .execute(); // Multiple expands const result = await db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.select("username")) - .expand("orders", (b) => b.select("total").top(5)) + .expand(users, (b) => b.select({ username: users.username })) + .expand(orders, (b) => b.select({ total: orders.total }).top(5)) .execute(); // Nested expands const result = await db - .from("contacts") + .from(contacts) .list() - .expand("users", (usersBuilder) => + .expand(users, (usersBuilder) => usersBuilder - .select("username", "email") - .expand("customer", (customerBuilder) => - customerBuilder.select("name", "tier"), + .select({ + username: users.username, + email: users.email, + }) + .expand(customers, (customerBuilder) => + customerBuilder.select({ + 
name: customers.name, + tier: customers.tier, + }), ), ) .execute(); // Complex expand with multiple options const result = await db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => + .expand(users, (b) => b - .select("username", "email") - .filter({ active: true }) - .orderBy("username") + .select({ + username: users.username, + email: users.email, + }) + .where(eq(users.active, true)) + .orderBy(asc(users.username)) .top(10) - .expand("customer", (nested) => nested.select("name")), + .expand(customers, (nested) => nested.select({ name: customers.name })), ) .execute(); ``` @@ -906,8 +808,8 @@ Execute multiple read operations in a single batch: ```typescript // Create query builders -const contactsQuery = db.from("contacts").list().top(5); -const usersQuery = db.from("users").list().top(5); +const contactsQuery = db.from(contacts).list().top(5); +const usersQuery = db.from(users).list().top(5); // Execute both queries in a single batch const result = await db.batch([contactsQuery, usersQuery]).execute(); @@ -937,12 +839,12 @@ Combine queries, inserts, updates, and deletes in a single batch: ```typescript // Mix different operation types -const listQuery = db.from("contacts").list().top(10); -const insertOp = db.from("contacts").insert({ +const listQuery = db.from(contacts).list().top(10); +const insertOp = db.from(contacts).insert({ name: "John Doe", email: "john@example.com", }); -const updateOp = db.from("users").update({ active: true }).byId("user-123"); +const updateOp = db.from(users).update({ active: true }).byId("user-123"); // All operations execute atomically const result = await db.batch([listQuery, insertOp, updateOp]).execute(); @@ -1014,9 +916,9 @@ Batch operations are transactional for write operations (inserts, updates, delet ```typescript const result = await db .batch([ - db.from("users").insert({ username: "alice", email: "alice@example.com" }), - db.from("users").insert({ username: "bob", email: "bob@example.com" }), - 
db.from("users").insert({ username: "charlie", email: "invalid" }), // This fails + db.from(users).insert({ username: "alice", email: "alice@example.com" }), + db.from(users).insert({ username: "bob", email: "bob@example.com" }), + db.from(users).insert({ username: "charlie", email: "invalid" }), // This fails ]) .execute(); @@ -1300,94 +1202,50 @@ await db.schema.createIndex("users", "email"); ## Advanced Features -### Type Safety - -The library provides full TypeScript type inference: - -```typescript -const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), - }, - idField: "id", -}); - -const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, -}); - -const db = connection.database("MyDB", { - occurrences: [usersTO], -}); - -// TypeScript knows these are valid field names -db.from("users").list().select("username", "email"); - -// TypeScript error: "invalid" is not a field name -db.from("users").list().select("invalid"); // TS Error - -// Type-safe filters -db.from("users") - .list() - .filter({ username: { eq: "john" } }); // ✓ -db.from("users") - .list() - .filter({ invalid: { eq: "john" } }); // TS Error -``` - ### Required and Read-Only Fields -The library automatically infers which fields are required based on whether their validator allows `null` or `undefined`: +The library automatically infers which fields are required based on field builder configuration: ```typescript -const usersBase = defineBaseTable({ - schema: { - id: z.string(), // Auto-required, auto-readOnly (idField) - username: z.string(), // Auto-required (not nullable) - email: z.string(), // Auto-required (not nullable) - status: z.string().nullable(), // Optional (nullable) - createdAt: z.string(), // Read-only system field - updatedAt: z.string().nullable(), // Optional - }, - idField: "id", // Automatically excluded from insert/update - required: ["status"], // Make status required despite being nullable - 
readOnly: ["createdAt"], // Exclude createdAt from insert/update +const users = fmTableOccurrence("users", { + id: textField().primaryKey(), // Auto-required, auto-readOnly (primaryKey) + username: textField().notNull(), // Auto-required (notNull) + email: textField().notNull(), // Auto-required (notNull) + status: textField(), // Optional (nullable by default) + createdAt: timestampField().readOnly(), // Read-only system field + updatedAt: timestampField(), // Optional (nullable) }); -// Insert: username, email, and status are required -// Insert: id and createdAt are excluded (cannot be provided) -db.from("users").insert({ +// Insert: username and email are required +// Insert: id and createdAt are excluded (cannot be provided - read-only) +db.from(users).insert({ username: "john", email: "john@example.com", - status: "active", // Required due to 'required' array + status: "active", // Optional updatedAt: new Date().toISOString(), // Optional }); // Update: all fields are optional except id and createdAt are excluded -db.from("users") +db.from(users) .update({ status: "active", // Optional - // id and createdAt cannot be modified + // id and createdAt cannot be modified (read-only) }) .byId("user-123"); ``` **Key Features:** -- **Auto-inference:** Non-nullable fields are automatically required for insert -- **Additional requirements:** Use `required` to make nullable fields required for new records -- **Read-only fields:** Use `readOnly` to exclude fields from insert/update (e.g., timestamps) -- **Automatic ID exclusion:** The `idField` is always read-only without needing to specify it +- **Auto-inference:** Fields with `.notNull()` are automatically required for insert +- **Primary keys:** Fields with `.primaryKey()` are automatically read-only +- **Read-only fields:** Use `.readOnly()` to exclude fields from insert/update (e.g., timestamps, calculated fields) - **Update flexibility:** All fields are optional for updates (except read-only fields) ### Prefer: 
fmodata.entity-ids This library supports using FileMaker's internal field identifiers (FMFID) and table occurrence identifiers (FMTID) instead of names. This protects your integration from both field and table occurrence name changes. -To enable this feature, simply define your schema with entity IDs using the `defineBaseTable` and `defineTableOccurrence` functions. Behind the scenes, the library will transform your request and the response back to the names you specify in these schemas. This is an all-or-nothing feature. For it to work properly, you must define all table occurrences passed to a `Database` with entity IDs (both `fmfIds` on the base table and `fmtId` on the table occurrence). +To enable this feature, simply define your schema with entity IDs using the `.entityId()` method on field builders and the `entityId` option in `fmTableOccurrence()`. Behind the scenes, the library will transform your request and the response back to the names you specify in your schema. This is an all-or-nothing feature. For it to work properly, you must define all table occurrences passed to a `Database` with entity IDs (both field IDs via `.entityId()` and table ID via the `entityId` option). _Note for OttoFMS proxy: This feature requires version 4.14 or later of OttoFMS_ @@ -1396,32 +1254,25 @@ How do I find these ids? 
They can be found in the XML version of the `$metadata` #### Basic Usage ```typescript -import { defineBaseTable, defineTableOccurrence } from "@proofkit/fmodata"; -import { z } from "zod/v4"; +import { + fmTableOccurrence, + textField, + timestampField, +} from "@proofkit/fmodata"; -// Define a base table with FileMaker field IDs -const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string().nullable(), - createdAt: z.string(), +// Define a table with FileMaker field IDs and table occurrence ID +const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:12039485"), + username: textField().notNull().entityId("FMFID:34323433"), + email: textField().entityId("FMFID:12232424"), + createdAt: timestampField().readOnly().entityId("FMFID:43234355"), }, - idField: "id", - fmfIds: { - id: "FMFID:12039485", - username: "FMFID:34323433", - email: "FMFID:12232424", - createdAt: "FMFID:43234355", + { + entityId: "FMTID:12432533", // FileMaker table occurrence ID }, -}); - -// Create a table occurrence with a FileMaker table occurrence ID -const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - fmtId: "FMTID:12432533", -}); +); ``` ### Error Handling @@ -1431,7 +1282,7 @@ All operations return a `Result` type with either `data` or `error`. 
The library #### Basic Error Checking ```typescript -const result = await db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error) { console.error("Query failed:", result.error.message); @@ -1450,7 +1301,7 @@ Handle HTTP status codes (4xx, 5xx) with the `HTTPError` class: ```typescript import { HTTPError, isHTTPError } from "@proofkit/fmodata"; -const result = await db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error) { if (isHTTPError(result.error)) { @@ -1487,7 +1338,7 @@ import { CircuitOpenError, } from "@proofkit/fmodata"; -const result = await db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error) { if (result.error instanceof TimeoutError) { @@ -1513,7 +1364,7 @@ When schema validation fails, you get a `ValidationError` with rich context: ```typescript import { ValidationError, isValidationError } from "@proofkit/fmodata"; -const result = await db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error) { if (isValidationError(result.error)) { @@ -1532,7 +1383,7 @@ The library uses [Standard Schema](https://github.com/standard-schema/standard-s ```typescript import { ValidationError } from "@proofkit/fmodata"; -const result = await db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error instanceof ValidationError) { // The cause property (ES2022 Error.cause) contains the Standard Schema issues array @@ -1585,7 +1436,7 @@ Handle OData-specific protocol errors: ```typescript import { ODataError, isODataError } from "@proofkit/fmodata"; -const result = await db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error) { if (isODataError(result.error)) { @@ -1608,7 +1459,7 @@ import { NetworkError, } from "@proofkit/fmodata"; -const result = await 
db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error) { if (result.error instanceof TimeoutError) { @@ -1630,7 +1481,7 @@ if (result.error) { **Pattern 2: Using kind property (for exhaustive matching):** ```typescript -const result = await db.from("users").list().execute(); +const result = await db.from(users).list().execute(); if (result.error) { switch (result.error.kind) { @@ -1786,7 +1637,7 @@ const queryString = db .from("users") .list() .select("username", "email") - .filter({ active: true }) + .where(eq(users.active, true)) .orderBy("username") .top(10) .getQueryString(); diff --git a/packages/fmodata/docs/ORM_API.md b/packages/fmodata/docs/ORM_API.md new file mode 100644 index 00000000..2e8fd651 --- /dev/null +++ b/packages/fmodata/docs/ORM_API.md @@ -0,0 +1,415 @@ +# New ORM API (Drizzle-Inspired) + +The new ORM API provides a Drizzle-inspired interface for defining tables and building queries with enhanced type safety and developer experience. 
+ +## Key Features + +- **Field Builders**: Fluent API for defining fields with metadata (primary keys, entity IDs, validators) +- **Column References**: Type-safe column references for queries (`users.id`, `users.name`) +- **Filter Operators**: Standalone operator functions (`eq()`, `gt()`, `and()`, `or()`) +- **Support for Both Styles**: Works with both typed strings AND column references +- **Cross-Table Comparisons**: Compare columns across tables (`eq(users.id, contacts.id_user)`) +- **Runtime Navigation Validation**: Validates expand paths against `navigationPaths` + +## Table Definition + +### Basic Table + +```typescript +import { fmTableOccurrence, textField, numberField, timestampField } from "@proofkit/fmodata"; + +export const users = fmTableOccurrence("users", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:6"), + email: textField().entityId("FMFID:7"), + age: numberField().entityId("FMFID:8"), + CreationTimestamp: timestampField().readOnly().entityId("FMFID:2"), +}, { + entityId: "FMTID:100", + defaultSelect: "schema", + navigationPaths: ["contacts", "orders"], +}); +``` + +### Field Builder Methods + +All field builders support these chainable methods: + +- `.primaryKey()` - Mark as primary key (automatically read-only) +- `.notNull()` - Mark as non-nullable +- `.readOnly()` - Exclude from insert/update operations +- `.entityId(id)` - Assign FileMaker field ID (FMFID) +- `.outputValidator(validator)` - Transform data when reading (FM → your app) +- `.inputValidator(validator)` - Transform data when writing (your app → FM) + +### Available Field Types + +```typescript +textField() // string | null +numberField() // number | null +dateField() // string | null (ISO date) +timeField() // string | null (ISO time) +timestampField() // string | null (ISO 8601) +containerField() // string | null (base64) +calcField() // string | null (auto read-only) +``` + +### Custom Field Types with Validators + 
+```typescript +import { z } from "zod/v4"; + +// Boolean field (FM stores as 0/1) +const booleanField = () => + numberField() + .outputValidator(z.coerce.boolean()) + .inputValidator(z.boolean().transform(v => v ? 1 : 0)); + +// Enum field +const statusField = () => + textField() + .outputValidator(z.enum(["active", "pending", "inactive"])); + +// Use in table definition +const users = fmTableOccurrence("users", { + active: booleanField().entityId("FMFID:7"), + status: statusField().entityId("FMFID:8"), +}, { entityId: "FMTID:100" }); +``` + +## Querying + +### Select - Multiple Syntax Options + +```typescript +import { eq } from "@proofkit/fmodata"; + +// Option 1: Typed strings (original style) +db.from(users).select("id", "name", "email"); + +// Option 2: Column references (new capability) +db.from(users).select(users.id, users.name, users.email); + +// Option 3: Mix both styles +db.from(users).select(users.id, "name", users.email); +``` + +### Filter with Operators + +#### Comparison Operators + +```typescript +import { eq, ne, gt, gte, lt, lte } from "@proofkit/fmodata"; + +// Equal +db.from(users).where(eq(users.status, "active")); + +// Not equal +db.from(users).where(ne(users.status, "deleted")); + +// Greater than / Greater than or equal +db.from(users).where(gt(users.age, 18)); +db.from(users).where(gte(users.age, 18)); + +// Less than / Less than or equal +db.from(users).where(lt(users.age, 65)); +db.from(users).where(lte(users.age, 65)); +``` + +#### String Operators + +```typescript +import { contains, startsWith, endsWith } from "@proofkit/fmodata"; + +// Contains substring +db.from(users).where(contains(users.name, "John")); + +// Starts with prefix +db.from(users).where(startsWith(users.email, "admin")); + +// Ends with suffix +db.from(users).where(endsWith(users.email, "@example.com")); +``` + +#### Array Operators + +```typescript +import { inArray, notInArray } from "@proofkit/fmodata"; + +// Value in array 
+db.from(users).where(inArray(users.status, ["active", "pending"])); + +// Value not in array +db.from(users).where(notInArray(users.status, ["deleted", "banned"])); +``` + +#### Null Checks + +```typescript +import { isNull, isNotNull } from "@proofkit/fmodata"; + +// Is null +db.from(users).where(isNull(users.deletedAt)); + +// Is not null +db.from(users).where(isNotNull(users.email)); +``` + +#### Logical Operators + +```typescript +import { and, or, not, eq, gt } from "@proofkit/fmodata"; + +// AND - all conditions must be true +db.from(users).where( + and( + eq(users.active, true), + gt(users.age, 18) + ) +); + +// OR - at least one condition must be true +db.from(users).where( + or( + eq(users.role, "admin"), + eq(users.role, "moderator") + ) +); + +// NOT - negate a condition +db.from(users).where( + not(eq(users.status, "deleted")) +); + +// Complex combinations +db.from(users).where( + and( + eq(users.active, true), + or( + eq(users.role, "admin"), + and( + eq(users.role, "user"), + gt(users.age, 18) + ) + ) + ) +); +``` + +### Cross-Table Column Comparisons + +```typescript +// Compare columns from different tables +db.from(users) + .select(users.id, users.name) + .where(eq(users.id, contacts.id_user)); + +// Works with any comparison operator +db.from(orders) + .where(gt(orders.total, users.credit_limit)); +``` + +### Order By + +```typescript +// With strings +db.from(users).orderBy("name"); +db.from(users).orderBy(["name", "asc"]); +db.from(users).orderBy([["name", "asc"], ["createdAt", "desc"]]); + +// With Column references +db.from(users).orderBy(users.name); +db.from(users).orderBy([users.name, "asc"]); +db.from(users).orderBy([[users.name, "asc"], [users.createdAt, "desc"]]); +``` + +## Navigation & Expansion + +Navigation paths are defined in the table definition and validated at runtime: + +```typescript +const users = fmTableOccurrence("users", { + // ... 
fields +}, { + navigationPaths: ["contacts", "orders"], // Valid paths +}); + +// Valid expansion (contacts is in navigationPaths) +db.from(users) + .expand(contacts, (q) => q.select("name", "email")) + .execute(); + +// Error: "Cannot expand to 'invoices'. Valid navigation paths: contacts, orders" +db.from(users) + .expand(invoices, (q) => q.select("id")) // Runtime error! + .execute(); +``` + +## Type Inference + +The new API provides excellent type inference: + +```typescript +// users.id is Column +type UserId = typeof users.id; + +// users.hobby is Column<"reading" | "writing" | "coding", "hobby"> +// (inferred from the enum validator) +type UserHobby = typeof users.hobby; + +// Filter values are type-checked +eq(users.hobby, "reading") // ✓ Valid - "reading" is in enum +eq(users.hobby, "invalid") // ✗ Type error - not in enum + +// Select fields are type-checked +db.from(users).select("id", "name") // ✓ Valid +db.from(users).select("invalid") // ✗ Type error +``` + +## Migration from Old API + +The new ORM API coexists with the old API. 
Both are exported from `@proofkit/fmodata`: + +```typescript +// Old API (still works) +import { defineBaseTable, defineTableOccurrence } from "@proofkit/fmodata"; + +// New API +import { fmTableOccurrence, textField, eq } from "@proofkit/fmodata"; +``` + +### Key Differences + +| Feature | Old API | New API | +|---------|---------|---------| +| Table Definition | `defineBaseTable` + `defineTableOccurrence` | `fmTableOccurrence` | +| Schema | Zod schemas in separate object | Field builders inline | +| Metadata | Separate `required`, `readOnly` arrays | Chainable methods on fields | +| Filters | Object syntax or typed strings | Operator functions | +| Select | Typed strings only | Typed strings OR column references | +| Navigation | Type-safe via `buildOccurrences()` | Runtime validation via `navigationPaths` | + +### Migration Example + +**Old API:** +```typescript +const usersBase = defineBaseTable({ + schema: { + id: z.string(), + name: z.string().nullable(), + active: z.coerce.boolean(), + }, + idField: "id", + readOnly: ["CreationTimestamp"], + fmfIds: { id: "FMFID:1", name: "FMFID:6" }, +}); + +const users = defineTableOccurrence({ + name: "users", + baseTable: usersBase, +}); + +// Query +db.from(users) + .select("id", "name") + .filter({ active: { eq: true } }); +``` + +**New API:** +```typescript +const users = fmTableOccurrence("users", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:6"), + active: numberField() + .outputValidator(z.coerce.boolean()) + .inputValidator(z.boolean().transform(v => v ? 1 : 0)), + CreationTimestamp: timestampField().readOnly(), +}); + +// Query +db.from(users) + .select(users.id, users.name) + .where(eq(users.active, true)); +``` + +## Best Practices + +1. **Use Column References for Clarity**: `users.name` is more explicit than `"name"` +2. **Define Reusable Field Builders**: Extract common patterns like `booleanField()` +3. 
**Leverage Type Inference**: Let TypeScript infer types from validators +4. **Use Logical Operators**: Prefer `and()` / `or()` over nested objects +5. **Validate Navigation Paths**: Always define `navigationPaths` for type safety +6. **Combine Old and New APIs**: Use whichever feels better for each use case + +## Complete Example + +```typescript +import { + fmTableOccurrence, + textField, + numberField, + timestampField, + FMServerConnection, + eq, + and, + or, + gt, + contains, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +// Define tables +const users = fmTableOccurrence("users", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + age: numberField().entityId("FMFID:4"), + status: textField() + .outputValidator(z.enum(["active", "pending", "inactive"])) + .entityId("FMFID:5"), + createdAt: timestampField().readOnly().entityId("FMFID:6"), +}, { + entityId: "FMTID:100", + navigationPaths: ["orders"], +}); + +const orders = fmTableOccurrence("orders", { + id: textField().primaryKey().entityId("FMFID:10"), + user_id: textField().entityId("FMFID:11"), + total: numberField().entityId("FMFID:12"), + status: textField().entityId("FMFID:13"), +}, { + entityId: "FMTID:101", + navigationPaths: ["users"], +}); + +// Connect +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "test-api-key" }, +}); +const db = connection.database("MyDatabase.fmp12"); + +// Query with new API +const result = await db + .from(users) + .select(users.id, users.name, users.email) + .where( + and( + or( + eq(users.status, "active"), + eq(users.status, "pending") + ), + gt(users.age, 18), + contains(users.email, "@example.com") + ) + ) + .orderBy([[users.name, "asc"], [users.createdAt, "desc"]]) + .top(50) + .execute(); + +if (result.data) { + console.log(`Found ${result.data.length} users`); +} +``` + diff --git 
a/packages/fmodata/package.json b/packages/fmodata/package.json index e03de79b..72e875e0 100644 --- a/packages/fmodata/package.json +++ b/packages/fmodata/package.json @@ -1,6 +1,6 @@ { "name": "@proofkit/fmodata", - "version": "0.1.0-alpha.13", + "version": "0.1.0-alpha.19", "description": "FileMaker OData API client", "repository": "git@github.com:proofgeist/proofkit.git", "author": "Eric <37158449+eluce2@users.noreply.github.com>", @@ -25,12 +25,16 @@ "format": "prettier --write .", "dev": "tsc --watch", "test": "vitest run --typecheck", + "tsc": "tsc --noEmit", "test:typecheck": "vitest run --typecheck", "test:watch": "vitest --typecheck", + "test:build": "pnpm build && TEST_BUILD=true vitest run --typecheck", + "test:watch:build": "TEST_BUILD=true vitest --typecheck", "test:e2e": "op inject -i op.env -o .env.local -f && vitest run tests/e2e.test.ts", "capture": "op inject -i op.env -o .env.local -f && tsx scripts/capture-responses.ts", "knip": "knip", - "pub:alpha": "bun run scripts/publish-alpha.ts" + "pub:alpha": "bun run scripts/publish-alpha.ts", + "global:link": "pnpm link --global" }, "dependencies": { "@fetchkit/ffetch": "^4.2.0", @@ -51,6 +55,7 @@ "@standard-schema/spec": "^1.0.0", "@tanstack/vite-config": "^0.2.0", "@types/node": "^22.17.1", + "fast-xml-parser": "^5.3.2", "prettier": "^3.5.3", "publint": "^0.3.12", "tsx": "^4.19.2", diff --git a/packages/fmodata/scripts/capture-responses.ts b/packages/fmodata/scripts/capture-responses.ts index 75685e53..7fa66266 100644 --- a/packages/fmodata/scripts/capture-responses.ts +++ b/packages/fmodata/scripts/capture-responses.ts @@ -396,6 +396,16 @@ const queriesToCapture: { return { url, response }; }, }, + { + name: "list with nested expand", + description: "List query with deeply nested expand and selected fields", + execute: async (client) => { + const path = `/contacts?$top=2&$expand=users($expand=user_customer($select=name))`; + const response = await client(path); + const url = response.url; + return { 
url, response }; + }, + }, ]; /** diff --git a/packages/fmodata/scripts/download-metadata.ts b/packages/fmodata/scripts/download-metadata.ts new file mode 100644 index 00000000..3e5d8168 --- /dev/null +++ b/packages/fmodata/scripts/download-metadata.ts @@ -0,0 +1,102 @@ +#!/usr/bin/env bun + +/** + * OData Metadata Downloader + * + * This script downloads OData metadata from a FileMaker server and saves it + * to a JSON file. The metadata can then be used with typegen-starter.ts to + * generate TypeScript table occurrence definitions. + * + * Usage: + * bun scripts/download-metadata.ts + * + * For now, authentication details are hardcoded in the script. + * Later, this will support command-line arguments for: + * - username and password, OR + * - API key and server URL + */ + +import { FMServerConnection } from "../src/client/filemaker-odata"; +import { writeFile } from "node:fs/promises"; +import { resolve, dirname } from "node:path"; +import { fileURLToPath } from "node:url"; +import dotenv from "dotenv"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = dirname(__filename); +dotenv.config({ path: resolve(__dirname, ".env.local") }); + +// ============================================================================ +// HARDCODED CONFIGURATION - Replace these with your actual values +// ============================================================================ + +const SERVER_URL = process.env.FM_SERVER; // Replace with your server URL +const DATABASE_NAME = process.env.FM_DATABASE; // Replace with your database name + +const USERNAME = process.env.FM_USERNAME; // Replace with your username +const PASSWORD = process.env.FM_PASSWORD; // Replace with your password + +// Output file path (relative to the scripts directory) +const OUTPUT_FILE = "../tests/fixtures/metadata.xml"; // Adjust as needed + +// ============================================================================ +// END CONFIGURATION +// 
============================================================================
+
+// Get __dirname equivalent in ES modules
+
+async function downloadMetadata(): Promise {
+  console.log("Connecting to FileMaker server...");
+  console.log(`Server URL: ${SERVER_URL}`);
+  console.log(`Database: ${DATABASE_NAME}`);
+
+  if (!SERVER_URL || !DATABASE_NAME || !USERNAME || !PASSWORD) {
+    throw new Error("Missing required configuration values");
+  }
+
+  // Create connection based on authentication method
+  const connection = new FMServerConnection({
+    serverUrl: SERVER_URL,
+    auth: { username: USERNAME, password: PASSWORD },
+    fetchClientOptions: {
+      timeout: 15000, // 15 seconds
+      retries: 2,
+    },
+  });
+
+  const db = connection.database(DATABASE_NAME);
+
+  console.log("Downloading metadata...");
+
+  try {
+    const fullMetadata = await db.getMetadata({ format: "xml" });
+
+    // Resolve output path
+    const outputPath = resolve(__dirname, OUTPUT_FILE);
+
+    console.log(`Writing metadata to: ${outputPath}`);
+
+    // Write metadata to file
+    await writeFile(outputPath, fullMetadata, "utf-8");
+
+    console.log("✓ Metadata downloaded successfully!");
+    console.log(
+      `\nYou can now use this metadata file with typegen-starter.ts:`,
+    );
+    console.log(
+      `  bun scripts/typegen-starter.ts ${OUTPUT_FILE} output/occurrences.ts`,
+    );
+  } catch (error) {
+    console.error("Error downloading metadata:", error);
+    if (error instanceof Error) {
+      console.error("Error message:", error.message);
+    }
+    process.exit(1);
+  }
+}
+
+// Run the script
+downloadMetadata().catch((error) => {
+  console.error("Fatal error:", error);
+  process.exit(1);
+}); diff --git a/packages/fmodata/scripts/dreams.ts b/packages/fmodata/scripts/dreams.ts new file mode 100644 index 00000000..ecda67b3 --- /dev/null +++ b/packages/fmodata/scripts/dreams.ts @@ -0,0 +1,162 @@ +// Example of the new ORM-style API for fmodata +// This demonstrates the Drizzle-inspired syntax with field builders and operators + +import { 
fmTableOccurrence, + textField, + numberField, + dateField, + timeField, + timestampField, + containerField, + calcField, + eq, + gt, + and, + or, + contains, +} from "../src/orm"; +import { FMServerConnection } from "../src"; +import { z } from "zod/v4"; + +// Helper function for boolean fields (FileMaker stores as 0/1) +const booleanField = () => + numberField() + // Parses the number to a boolean when reading from the database + .outputValidator(z.coerce.boolean()) + // Allows the user to pass a boolean when inserting or updating, converting it back to number + .inputValidator(z.boolean().transform((val) => (val ? 1 : 0))); + +// Define table with field builders +// All fields nullable by default, unless primary key or "notNull" is set +export const users = fmTableOccurrence( + "users", // table name on the graph + { + id: textField().primaryKey().entityId("FMFID:1"), + CreationTimestamp: timestampField().readOnly().entityId("FMFID:2"), + CreatedBy: textField().readOnly().entityId("FMFID:3"), + ModificationTimestamp: timestampField().readOnly().entityId("FMFID:4"), + ModifiedBy: textField().readOnly().entityId("FMFID:5"), + name: textField().notNull().entityId("FMFID:6"), + active: booleanField().entityId("FMFID:7"), + id_customer: textField().entityId("FMFID:8"), + hobby: textField() + .outputValidator(z.enum(["reading", "writing", "coding"])) + .entityId("FMFID:9"), + }, + { + entityId: "FMTID:100", + defaultSelect: "schema", + navigationPaths: ["contacts"], // Runtime validation when expanding + }, +); + +// @ts-expect-error should not be able to see property +users._entityId; + +// @ts-expect-error should not be able to see symbols +users[FMTableBaseTableConfig]; + +// Example contacts table +export const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:10"), + name: textField().notNull().entityId("FMFID:11"), + email: textField().entityId("FMFID:12"), + id_user: textField().entityId("FMFID:13"), + }, + { + 
entityId: "FMTID:101", + defaultSelect: "schema", + navigationPaths: ["users"], + }, +); + +const connection = new FMServerConnection({ + serverUrl: "https://api.example.com", + auth: { apiKey: "test-api-key" }, +}); + +const db = connection.database("MyDatabase.fmp12"); + +// Expand with FMTable object (validated against navigationPaths) +db.from(users).list().expand(contacts); + +// Navigate with FMTable object (validated against navigationPaths) +db.from(users).navigate(contacts).list(); + +// ============================================================================ +// Query Examples - New ORM-style API +// ============================================================================ + +// Select with typed strings (original style) +db.from(users).list().select(users.id); + +// Select with Column references (new capability) +db.from(users) + .list() + .select({ id: users.id, name: users.name, hobby: users.hobby }); + +// Filter with operators - "reading" autocompletes based on enum +db.from(users) + .list() + .select(users.id, users.name) + .where(eq(users.hobby, "reading")); + +// Complex filters with logical operators +db.from(users) + .list() + .select("id", "name") + .where( + and( + eq(users.active, true), + or(eq(users.hobby, "reading"), eq(users.hobby, "coding")), + ), + ); + +// String operators +db.from(users) + .list() + .select("name", "email") + .where(contains(users.name, "John")); + +// // Cross-table column comparison +// db.from(users).select("id", "name").where(eq(users.id, contacts.id_user)); + +// OrderBy with Column references +db.from(users).list().select("id", "name").orderBy([users.name, "asc"]); + +// OrderBy with strings (still supported) +db.from(users) + .list() + .select(users.id, users.name) + .orderBy([ + ["name", "asc"], + ["CreationTimestamp", "desc"], + ]); + +// ============================================================================ +// Note: Insert/Update/Delete APIs remain unchanged +// 
============================================================================ + +// Insert (existing API) +// db.from(users).insert({ name: "John", hobby: "reading" }); + +// Update (existing API) +// db.from(users).update({ name: "Jane" }).where(eq(users.id, "123")); + +// Delete (existing API) +// db.from(users).delete().where(eq(users.id, "123")); + +// ============================================================================ +// Type inference examples +// ============================================================================ + +// users.id is Column +// users.name is Column +// users.hobby is Column<"reading" | "writing" | "coding", "hobby"> +// users.active is Column + +type UserId = typeof users.id; // Column +type UserHobby = typeof users.hobby; // Column<"reading" | "writing" | "coding", "hobby"> diff --git a/packages/fmodata/scripts/experiment-batch.ts b/packages/fmodata/scripts/experiment-batch.ts index aa6328e0..44174f20 100644 --- a/packages/fmodata/scripts/experiment-batch.ts +++ b/packages/fmodata/scripts/experiment-batch.ts @@ -15,8 +15,10 @@ import { fileURLToPath } from "url"; import { z } from "zod/v4"; import { FMServerConnection, - defineBaseTable, - defineTableOccurrence, + fmTableOccurrence, + textField, + timestampField, + eq, } from "../src/index"; // Get __dirname equivalent in ES modules @@ -38,23 +40,15 @@ if (!serverUrl || !username || !password || !database) { } // Define schemas -const contactsBase = defineBaseTable({ - schema: { - PrimaryKey: z.string(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - hobby: z.string().nullable(), - id_user: z.string().nullable(), - }, - idField: "PrimaryKey", -}); - -const contactsTO = defineTableOccurrence({ - name: "contacts" as const, - baseTable: contactsBase, +const contactsTO = fmTableOccurrence("contacts", { + PrimaryKey: 
textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), }); // Create connection @@ -124,7 +118,9 @@ async function experiment1_MultipleInserts() { async function experiment2_MixedOperations() { console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 2: Mixed Operations (GET + INSERT + UPDATE + DELETE)"); + console.log( + "EXPERIMENT 2: Mixed Operations (GET + INSERT + UPDATE + DELETE)", + ); console.log("=".repeat(60)); // First, create a record we can update/delete @@ -172,7 +168,11 @@ async function experiment2_MixedOperations() { if (result.data) { // Track insert result for cleanup const insertResult = result.data[1]; - if (insertResult && typeof insertResult === "object" && "PrimaryKey" in insertResult) { + if ( + insertResult && + typeof insertResult === "object" && + "PrimaryKey" in insertResult + ) { createdRecordIds.push(insertResult.PrimaryKey as string); } } @@ -205,8 +205,12 @@ async function experiment3_FailingOperation() { hobby: "Should this succeed?", }); - console.log("\nExecuting batch with: INSERT (valid), UPDATE (invalid ID), INSERT (valid)..."); - console.log("Question: What happens to the third operation when the second fails?"); + console.log( + "\nExecuting batch with: INSERT (valid), UPDATE (invalid ID), INSERT (valid)...", + ); + console.log( + "Question: What happens to the third operation when the second fails?", + ); const result = await db.batch([insert1, failingUpdate, insert2]).execute(); @@ -269,7 +273,9 @@ async function experiment4_FailingDelete() { async function experiment5_AllGetWithOneFailure() { console.log("\n" + "=".repeat(60)); - console.log("EXPERIMENT 5: Multiple GETs with One Filter that Returns Nothing"); + console.log( + "EXPERIMENT 5: Multiple GETs with One Filter that Returns Nothing", + ); console.log("=".repeat(60)); // Query that should 
return results @@ -277,14 +283,16 @@ async function experiment5_AllGetWithOneFailure() { // Query with a filter that returns empty (not an error, just no results) const query2 = db - .from("contacts") + .from(contactsTO) .list() - .filter({ name: "THIS_NAME_DEFINITELY_DOES_NOT_EXIST_12345" }); + .where(eq(contactsTO.name, "THIS_NAME_DEFINITELY_DOES_NOT_EXIST_12345")); // Another query that should return results const query3 = db.from("contacts").list().top(1); - console.log("\nExecuting batch with: GET (valid), GET (empty filter), GET (valid)..."); + console.log( + "\nExecuting batch with: GET (valid), GET (empty filter), GET (valid)...", + ); const result = await db.batch([query1, query2, query3]).execute(); @@ -302,10 +310,10 @@ async function experiment6_RawResponseInspection() { // Make a direct batch request to see raw response const timestamp = Date.now(); const boundary = "batch_direct_test_123"; - + const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; const batchUrl = `${baseUrl}/$batch`; - + // Build a simple batch body with one GET const batchBody = [ `--${boundary}`, @@ -327,7 +335,7 @@ async function experiment6_RawResponseInspection() { const response = await fetch(batchUrl, { method: "POST", headers: { - "Authorization": authHeader, + Authorization: authHeader, "Content-Type": `multipart/mixed; boundary=${boundary}`, "OData-Version": "4.0", }, @@ -352,10 +360,10 @@ async function experiment7_RawResponseWithInsert() { const timestamp = Date.now(); const boundary = "batch_insert_test_456"; const changesetBoundary = "changeset_insert_789"; - + const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; const batchUrl = `${baseUrl}/$batch`; - + const insertBody = JSON.stringify({ name: `Direct Insert Test - ${timestamp}`, hobby: "Testing", @@ -388,7 +396,7 @@ async function experiment7_RawResponseWithInsert() { const response = await fetch(batchUrl, { method: "POST", headers: { - "Authorization": authHeader, + Authorization: authHeader, "Content-Type": 
`multipart/mixed; boundary=${boundary}`, "OData-Version": "4.0", }, @@ -403,7 +411,7 @@ async function experiment7_RawResponseWithInsert() { console.log("\n--- Raw Response Body ---"); console.log(responseText); console.log("--- End Raw Response ---"); - + // Try to extract created record ID for cleanup const pkMatch = responseText.match(/"PrimaryKey":\s*"([^"]+)"/); if (pkMatch && pkMatch[1]) { @@ -454,7 +462,7 @@ async function experiment8_TrueError() { const response = await fetch(batchUrl, { method: "POST", headers: { - "Authorization": authHeader, + Authorization: authHeader, "Content-Type": `multipart/mixed; boundary=${boundary}`, "OData-Version": "4.0", }, @@ -479,13 +487,19 @@ async function experiment9_RawResponseWithFailure() { const boundary = "batch_fail_test"; const cs1 = "changeset_1"; const cs2 = "changeset_2"; - + const baseUrl = `${serverUrl}/fmi/odata/v4/${database}`; const batchUrl = `${baseUrl}/$batch`; - - const insertBody1 = JSON.stringify({ name: `Before Fail - ${timestamp}`, hobby: "Test" }); + + const insertBody1 = JSON.stringify({ + name: `Before Fail - ${timestamp}`, + hobby: "Test", + }); const updateBody = JSON.stringify({ hobby: "Should fail" }); - const insertBody2 = JSON.stringify({ name: `After Fail - ${timestamp}`, hobby: "Test" }); + const insertBody2 = JSON.stringify({ + name: `After Fail - ${timestamp}`, + hobby: "Test", + }); // Build: INSERT (valid), UPDATE (invalid ID), INSERT (valid) const batchBody = [ @@ -527,7 +541,7 @@ async function experiment9_RawResponseWithFailure() { "Content-Transfer-Encoding: binary", "", `POST ${baseUrl}/contacts HTTP/1.1`, - "Content-Type: application/json", + "Content-Type: application/json", "Prefer: return=representation", `Content-Length: ${insertBody2.length}`, "", @@ -543,7 +557,7 @@ async function experiment9_RawResponseWithFailure() { const response = await fetch(batchUrl, { method: "POST", headers: { - "Authorization": authHeader, + Authorization: authHeader, "Content-Type": 
`multipart/mixed; boundary=${boundary}`, "OData-Version": "4.0", }, @@ -557,7 +571,7 @@ async function experiment9_RawResponseWithFailure() { console.log("\n--- Raw Response Body ---"); console.log(responseText); console.log("--- End Raw Response ---"); - + // Extract created record IDs for cleanup const pkMatches = responseText.matchAll(/"PrimaryKey":\s*"([^"]+)"/g); for (const match of pkMatches) { @@ -598,4 +612,3 @@ async function main() { } main().catch(console.error); - diff --git a/packages/fmodata/scripts/publish-alpha.ts b/packages/fmodata/scripts/publish-alpha.ts index 21ff7f6e..79899953 100644 --- a/packages/fmodata/scripts/publish-alpha.ts +++ b/packages/fmodata/scripts/publish-alpha.ts @@ -135,6 +135,17 @@ function hasUncommittedChanges(): { hasChanges: boolean; details: string } { function commitChanges(message: string): void { const repoRoot = resolve(__dirname, "../.."); try { + // Check if there are any changes to commit + const status = execSync("git status --porcelain", { + cwd: repoRoot, + encoding: "utf-8", + }).trim(); + + if (!status) { + console.log(`ℹ️ No changes to commit`); + return; + } + // Stage all changes execSync("git add -A", { cwd: repoRoot, @@ -228,6 +239,51 @@ function autoBumpPatch(fromVersion?: string): string { return bumpVersion(fromVersion ?? version, "patch"); } +function checkNpmAuth(): boolean { + try { + execSync("npm whoami", { + cwd: resolve(__dirname, ".."), + stdio: "pipe", + }); + return true; + } catch (error) { + return false; + } +} + +async function ensureNpmAuth(): Promise { + if (checkNpmAuth()) { + console.log("✅ Authenticated with npm"); + return; + } + + console.log("\n⚠️ Not authenticated with npm"); + console.log(" You need to log in to npm before publishing.\n"); + + const answer = await question("Would you like to log in now? 
(y/n): "); + + if (answer.toLowerCase() !== "y" && answer.toLowerCase() !== "yes") { + console.log("❌ Publish cancelled - npm authentication required."); + rl.close(); + process.exit(0); + } + + console.log("\n🔐 Opening npm login..."); + console.log(" (This will open your browser for authentication)\n"); + + try { + execSync("npm login", { + cwd: resolve(__dirname, ".."), + stdio: "inherit", + }); + console.log("\n✅ Successfully logged in to npm"); + } catch (error) { + console.error("\n❌ Failed to log in to npm"); + rl.close(); + process.exit(1); + } +} + async function updateVersion(newVersion: string) { packageJson.version = newVersion; writeFileSync( @@ -241,25 +297,6 @@ async function updateVersion(newVersion: string) { async function main() { try { - // Check for uncommitted changes first - const gitStatus = hasUncommittedChanges(); - if (gitStatus.hasChanges) { - console.log(`\n⚠️ You have uncommitted changes (${gitStatus.details})`); - console.log(" These must be committed before publishing.\n"); - - const commitMessage = await question( - "Enter commit message (or leave empty to cancel): ", - ); - - if (!commitMessage.trim()) { - console.log("❌ Publish cancelled - no commit message provided."); - rl.close(); - process.exit(0); - } - - commitChanges(commitMessage.trim()); - } - console.log(`\n📦 Checking npm registry for ${packageName}...`); // Check npm for published version @@ -281,19 +318,40 @@ async function main() { console.log(` Local git hash: ${localGitHash.substring(0, 7)}`); } - const comparison = compareVersions(version, publishedVersion); const gitHashesMatch = publishedGitHash && localGitHash && publishedGitHash === localGitHash; + // Only check for uncommitted changes if git hashes match + // If hashes match but there are uncommitted changes, that's fine - we'll commit later + // If hashes match and there are NO uncommitted changes, prevent republishing same code + if (gitHashesMatch) { + const gitStatus = hasUncommittedChanges(); + if 
(!gitStatus.hasChanges) { + console.log( + `\n⚠️ Git hashes match and there are no uncommitted changes.`, + ); + console.log( + "❌ Cannot republish the exact same code that's already on npm.", + ); + rl.close(); + process.exit(0); + } + // If hashes match but there are uncommitted changes, proceed (will commit later) + } + + const comparison = compareVersions(version, publishedVersion); + if (comparison <= 0) { // Version needs to be bumped if (gitHashesMatch) { + // Git hashes match but we have uncommitted changes (already checked above) + // Auto-bump patch version from the HIGHER version + const versionToBumpFrom = comparison < 0 ? publishedVersion : version; console.log( - `\n⚠️ Local version (${version}) is not greater than published version (${publishedVersion}), but git hashes match.`, + `\n🔄 Git hashes match but you have uncommitted changes - automatically bumping from ${versionToBumpFrom}...`, ); - console.log("❌ Cannot publish without bumping version."); - rl.close(); - process.exit(0); + const newVersion = autoBumpPatch(versionToBumpFrom); + await updateVersion(newVersion); } else { // Git hashes differ, auto-bump patch version from the HIGHER version // (usually the published version when local is behind) @@ -313,6 +371,9 @@ async function main() { } } else { console.log(` No published version found (first publish)`); + // If version hasn't been published, ensure we have a valid version + // The current version should be fine, but we could bump if needed + console.log(` Using current version: ${version}`); } console.log(`\n📦 Ready to publish:`); @@ -340,6 +401,9 @@ async function main() { process.exit(0); } + // Check and ensure npm authentication + await ensureNpmAuth(); + // Publish with npm (will prompt for 2FA interactively if needed) console.log("\n🚀 Publishing to npm with tag 'alpha'..."); execSync("npm publish --tag alpha --access public --", { @@ -355,6 +419,10 @@ async function main() { }); console.log("\n✅ Successfully published!"); + + // 
Commit the version change with the version number as the commit message + console.log(`\n📝 Committing version change...`); + commitChanges(version); } catch (error) { console.error("\n❌ Error:", error); rl.close(); diff --git a/packages/fmodata/scripts/typegen-starter.ts b/packages/fmodata/scripts/typegen-starter.ts index 7394c5bd..77e7e7fc 100755 --- a/packages/fmodata/scripts/typegen-starter.ts +++ b/packages/fmodata/scripts/typegen-starter.ts @@ -3,49 +3,51 @@ /** * OData Metadata to TypeScript Table Occurrence Generator * - * This script parses OData metadata JSON files and generates TypeScript code - * with defineBaseTable, defineTableOccurrence, and buildOccurrences definitions + * This script parses OData metadata XML files and generates TypeScript code + * with fmTableOccurrence definitions using navigationPaths * for use with the fmodata package. * * Usage: - * bun scripts/typegen-starter.ts + * bun scripts/typegen-starter.ts * * Example: - * bun scripts/typegen-starter.ts tests/fixtures/sample-metadata.json output/occurrences.ts + * bun scripts/typegen-starter.ts tests/fixtures/metadata.xml output/ * * Features: * - Automatically maps OData types (Edm.String, Edm.Decimal, etc.) 
to Zod types - * - Identifies key fields from $Key and ensures they're non-nullable + * - Identifies key fields from Key elements and ensures they're non-nullable * - Marks calculation fields as readOnly * - Handles nullable fields with .nullable() * - Extracts FileMaker field IDs (FMFID) and table IDs (FMTID) * - Smart ID field detection (prioritizes @AutoGenerated fields or fields with "id" in name) - * - Generates navigation relationships via buildOccurrences with type-safe string refs + * - Generates navigation relationships via navigationPaths with type-safe string refs + * - Outputs one file per table with dynamic imports based on used field builders */ -import { readFile, writeFile } from "node:fs/promises"; -import { resolve } from "node:path"; +import { readFile, writeFile, mkdir } from "node:fs/promises"; +import { resolve, join, basename } from "node:path"; +import { XMLParser } from "fast-xml-parser"; -// Map OData types to Zod types -function mapODataTypeToZod(edmType: string): string { +// Map OData types to field builder functions +function mapODataTypeToFieldBuilder(edmType: string): string { switch (edmType) { case "Edm.String": - return "z.string()"; + return "textField()"; case "Edm.Decimal": case "Edm.Int32": case "Edm.Int64": case "Edm.Double": - return "z.number()"; + return "numberField()"; case "Edm.Boolean": - return "z.boolean()"; + return "numberField().outputValidator(z.coerce.boolean())"; case "Edm.Date": - return "z.iso.date()"; // ISO date string + return "dateField()"; // ISO date string case "Edm.DateTimeOffset": - return "z.iso.datetime()"; // ISO datetime string + return "timestampField()"; // ISO datetime string case "Edm.Binary": - return "z.string()"; // base64 encoded + return "containerField()"; // base64 encoded default: - return "z.unknown()"; + return "textField()"; // Default to textField for unknown types } } @@ -63,75 +65,79 @@ interface FieldMetadata { } interface NavigationProperty { - $Kind: "NavigationProperty"; - 
$Type: string; - $Collection?: boolean; + Name: string; + Type: string; // e.g., "Collection(com.filemaker.odata.WebData.fmp12.Work_Orders_)" } interface EntityType { - $Kind: "EntityType"; + Name: string; "@TableID": string; $Key?: string[]; - [fieldName: string]: any; + Properties: Map; + NavigationProperties: NavigationProperty[]; } -interface Metadata { - $Version: string; - "@ServerVersion": string; - [namespace: string]: any; +interface EntitySet { + Name: string; + EntityType: string; // Full type name like "com.filemaker.odata.WebData.fmp12.Addresses_" } interface GeneratedTO { varName: string; code: string; navigation: string[]; // Array of target TO names + usedFieldBuilders: Set; // Track which field builders are used + needsZod: boolean; // Whether z.coerce.boolean() is used +} + +function extractEntityTypeNameFromType(typeString: string): string | null { + // Extract entity type name from Type like "Collection(com.filemaker.odata.WebData.fmp12.Work_Orders_)" + // Returns "Work_Orders_" + // Pattern: Collection(namespace.EntityTypeName) -> extract EntityTypeName + const collectionMatch = typeString.match(/Collection\(([^)]+)\)/); + if (collectionMatch) { + const fullType = collectionMatch[1]; + // Extract the last part after the last dot (e.g., "com.filemaker.odata.WebData.fmp12.Work_Orders_" -> "Work_Orders_") + const parts = fullType.split("."); + return parts[parts.length - 1] || null; + } + // Try without Collection wrapper - extract last part after last dot + const parts = typeString.split("."); + return parts.length > 0 ? 
parts[parts.length - 1] : null; } function generateTableOccurrence( - entityName: string, + entitySetName: string, entityType: EntityType, - namespace: string, entityTypeToSetMap: Map, ): GeneratedTO { const fmtId = entityType["@TableID"]; const keyFields = entityType.$Key || []; - const fields: Record = {}; + const fields = entityType.Properties; const readOnlyFields: string[] = []; const navigationTargets: string[] = []; - - // Extract field definitions and navigation properties - for (const [key, value] of Object.entries(entityType)) { - if ( - key.startsWith("$") || - key.startsWith("@") || - typeof value !== "object" || - !value.$Type - ) { - continue; - } - - // Check if it's a navigation property - if (value.$Kind === "NavigationProperty") { - const navProp = value as NavigationProperty; - // Extract the target entity type from the $Type (e.g., "WebData.fmp12.Addresses_" -> "Addresses_") - const targetEntityType = navProp.$Type.replace(`${namespace}.`, ""); - const targetEntitySet = entityTypeToSetMap.get(targetEntityType); + const usedFieldBuilders = new Set(); + let needsZod = false; + + // Process navigation properties + for (const navProp of entityType.NavigationProperties) { + const targetEntityTypeName = extractEntityTypeNameFromType(navProp.Type); + if (targetEntityTypeName) { + const targetEntitySet = entityTypeToSetMap.get(targetEntityTypeName); if (targetEntitySet) { navigationTargets.push(targetEntitySet); } - continue; } + } - // Regular field - fields[key] = value as FieldMetadata; - - // Check if field is calculation or read-only + // Determine read-only fields + for (const [fieldName, metadata] of fields.entries()) { if ( - value["@Calculation"] || - value["@Global"] || - value["@Org.OData.Core.V1.Permissions"]?.includes("Read") + metadata["@Calculation"] || + metadata["@Global"] || + metadata["@Org.OData.Core.V1.Permissions"]?.includes("Read") ) { - readOnlyFields.push(key); + readOnlyFields.push(fieldName); } } @@ -142,9 +148,9 @@ function 
generateTableOccurrence( idField = keyFields[0]; } else { // Find a suitable ID field: look for auto-generated fields or fields with "id" in the name - const fieldNames = Object.keys(fields); + const fieldNames = Array.from(fields.keys()); const autoGenField = fieldNames.find( - (name) => fields[name]["@AutoGenerated"], + (name) => fields.get(name)?.["@AutoGenerated"], ); const idFieldName = fieldNames.find( (name) => @@ -155,142 +161,348 @@ function generateTableOccurrence( idField = autoGenField || idFieldName || fieldNames[0]; } - // Generate schema object - const schemaLines: string[] = []; - const fieldEntries = Object.entries(fields); + // Generate field builder definitions + const fieldLines: string[] = []; + const fieldEntries = Array.from(fields.entries()); for (let i = 0; i < fieldEntries.length; i++) { const [fieldName, metadata] = fieldEntries[i]; - const zodType = mapODataTypeToZod(metadata.$Type); - const isKeyField = keyFields.includes(fieldName); - const isNullable = metadata.$Nullable === true && !isKeyField; - const isLastField = i === fieldEntries.length - 1; - - let line = ` ${JSON.stringify(fieldName)}: ${zodType}`; - if (isNullable) { - line += ".nullable()"; + const fieldBuilder = mapODataTypeToFieldBuilder(metadata.$Type); + + // Track which field builders are used + if (fieldBuilder.includes("textField()")) { + usedFieldBuilders.add("textField"); + } else if (fieldBuilder.includes("numberField()")) { + usedFieldBuilders.add("numberField"); + } else if (fieldBuilder.includes("dateField()")) { + usedFieldBuilders.add("dateField"); + } else if (fieldBuilder.includes("timestampField()")) { + usedFieldBuilders.add("timestampField"); + } else if (fieldBuilder.includes("containerField()")) { + usedFieldBuilders.add("containerField"); } - // Add comma if not the last field - if (!isLastField) { - line += ","; + // Track if z.coerce.boolean() is used + if (fieldBuilder.includes("z.coerce.boolean()")) { + needsZod = true; } - // Add comment for key 
fields or special types - const comments: string[] = []; + const isKeyField = keyFields.includes(fieldName); + // Only add .notNull() if explicitly marked as Nullable="false" in XML + // metadata.$Nullable is false only if Nullable="false" was in XML, otherwise it's true (nullable by default) + const isExplicitlyNotNullable = metadata.$Nullable === false; + const isReadOnly = readOnlyFields.includes(fieldName); + const isLastField = i === fieldEntries.length - 1; + + let line = ` ${JSON.stringify(fieldName)}: ${fieldBuilder}`; + + // Chain methods: primaryKey, readOnly, notNull, entityId if (isKeyField) { - comments.push("Key field - never null"); - } else if (!metadata.$Nullable) { - comments.push("Not nullable"); + line += ".primaryKey()"; + } + if (isReadOnly) { + line += ".readOnly()"; + } + // Only add .notNull() if explicitly marked as Nullable="false" in XML + // Key fields are handled by primaryKey() which already makes them not null + if (isExplicitlyNotNullable && !isKeyField) { + line += ".notNull()"; } - if (metadata["@AutoGenerated"]) { - comments.push("Auto-generated"); + if (metadata["@FieldID"]) { + line += `.entityId(${JSON.stringify(metadata["@FieldID"])})`; } - if (comments.length > 0) { - line += ` // ${comments.join(", ")}`; + // Add comma if not the last field + if (!isLastField) { + line += ","; } - schemaLines.push(line); + fieldLines.push(line); } - // Generate fmfIds object - const fmfIdsLines: string[] = []; - const fmfFieldEntries = Object.entries(fields); - for (let i = 0; i < fmfFieldEntries.length; i++) { - const [fieldName, metadata] = fmfFieldEntries[i]; - const isLastField = i === fmfFieldEntries.length - 1; - const comma = isLastField ? 
"" : ","; - fmfIdsLines.push( - ` ${JSON.stringify(fieldName)}: ${JSON.stringify(metadata["@FieldID"])}${comma}`, - ); + const varName = entitySetName.replace(/[^a-zA-Z0-9_]/g, "_"); + + // Build options object + const optionsParts: string[] = []; + if (fmtId) { + optionsParts.push(`entityId: ${JSON.stringify(fmtId)}`); } + // Always include navigationPaths, even if empty + const navPaths = navigationTargets.map((n) => JSON.stringify(n)).join(", "); + optionsParts.push(`navigationPaths: [${navPaths}]`); - // Generate the table occurrence - const readOnlyConfig = - readOnlyFields.length > 0 - ? `,\n readOnly: [${readOnlyFields.map((f) => JSON.stringify(f)).join(", ")}] as const` + const optionsSection = + optionsParts.length > 0 + ? `, {\n${optionsParts.map((p) => ` ${p}`).join(",\n")}\n}` : ""; - const varName = entityName.replace(/[^a-zA-Z0-9_]/g, "_"); - - const code = `// ${entityName} table occurrence -const _${varName} = defineTableOccurrence({ - fmtId: ${JSON.stringify(fmtId)}, - name: ${JSON.stringify(entityName)} as const, - baseTable: defineBaseTable({ - schema: { -${schemaLines.join("\n")} - }, - idField: ${JSON.stringify(idField)}, - fmfIds: { -${fmfIdsLines.join("\n")} - }${readOnlyConfig} - }), -});`; + const code = `export const ${varName} = fmTableOccurrence(${JSON.stringify(entitySetName)}, { +${fieldLines.join("\n")} +}${optionsSection});`; return { varName, code, navigation: navigationTargets, + usedFieldBuilders, + needsZod, }; } -async function generateFromMetadata( - inputPath: string, - outputPath: string, -): Promise { - console.log(`Reading metadata from: ${inputPath}`); +function ensureArray(value: T | T[] | undefined): T[] { + if (!value) return []; + return Array.isArray(value) ? 
value : [value]; +} - // Read and parse the metadata JSON - const metadataContent = await readFile(inputPath, "utf-8"); - const metadata: Metadata = JSON.parse(metadataContent); +async function parseXMLMetadata(xmlContent: string): Promise<{ + entityTypes: Map; + entitySets: Map; + namespace: string; +}> { + const entityTypes = new Map(); + const entitySets = new Map(); + let namespace = ""; + + // Parse XML using fast-xml-parser + const parser = new XMLParser({ + ignoreAttributes: false, + attributeNamePrefix: "@_", + textNodeName: "#text", + parseAttributeValue: true, + trimValues: true, + parseTrueNumberOnly: false, + arrayMode: false, + }); + + const parsed = parser.parse(xmlContent); + + // Navigate to Schema element + const edmx = parsed["edmx:Edmx"] || parsed.Edmx; + if (!edmx) { + throw new Error("No Edmx element found in XML"); + } - // Find the namespace (skip $Version and @ServerVersion) - const namespace = Object.keys(metadata).find( - (key) => !key.startsWith("$") && !key.startsWith("@"), - ); + const dataServices = edmx["edmx:DataServices"] || edmx.DataServices; + if (!dataServices) { + throw new Error("No DataServices element found in XML"); + } - if (!namespace) { - throw new Error("No namespace found in metadata"); + const schema = ensureArray(dataServices.Schema)[0]; + if (!schema) { + throw new Error("No Schema element found in XML"); } - const namespaceData = metadata[namespace]; - const entityTypes: Record = {}; - const entitySets: Record = {}; - - // Extract entity types and entity sets - for (const [key, value] of Object.entries(namespaceData)) { - if (typeof value === "object" && value !== null) { - const obj = value as any; - if (obj.$Kind === "EntityType") { - entityTypes[key] = value as EntityType; - } else if (obj.$Kind === "EntitySet") { - // EntitySet references an EntityType - const typeName = obj.$Type?.replace(`${namespace}.`, ""); - if (typeName) { - entitySets[key] = typeName; + namespace = schema["@_Namespace"] || 
schema.Namespace || ""; + + // Extract EntityTypes + const entityTypeList = ensureArray(schema.EntityType); + for (const entityTypeEl of entityTypeList) { + const entityTypeName = entityTypeEl["@_Name"] || entityTypeEl.Name; + if (!entityTypeName) continue; + + // Get TableID from Annotation + let tableId = ""; + const annotations = ensureArray(entityTypeEl.Annotation); + for (const ann of annotations) { + const term = ann["@_Term"] || ann.Term; + if (term === "com.filemaker.odata.TableID") { + tableId = ann["@_String"] || ann.String || ""; + break; + } + } + + // Get Key fields + const keyFields: string[] = []; + if (entityTypeEl.Key) { + const propertyRefs = ensureArray(entityTypeEl.Key.PropertyRef); + for (const propRef of propertyRefs) { + const name = propRef["@_Name"] || propRef.Name; + if (name) keyFields.push(name); + } + } + + // Extract Properties + const properties = new Map(); + const propertyList = ensureArray(entityTypeEl.Property); + for (const propEl of propertyList) { + const propName = propEl["@_Name"] || propEl.Name; + if (!propName) continue; + + const propType = propEl["@_Type"] || propEl.Type || ""; + // Nullable is false only if explicitly set to "false", otherwise assume nullable + // The parser converts "false" to boolean false, so check for both + const nullableAttr = propEl["@_Nullable"] ?? 
propEl.Nullable; + const isExplicitlyNotNullable = + nullableAttr === "false" || nullableAttr === false; + const defaultValue = + propEl["@_DefaultValue"] || propEl.DefaultValue || undefined; + + // Get annotations + let fieldId = ""; + let isCalculation = false; + let isGlobal = false; + let isAutoGenerated = false; + let hasIndex = false; + let isVersionId = false; + let permissions: string | undefined; + + const propAnnotations = ensureArray(propEl.Annotation); + for (const ann of propAnnotations) { + const term = ann["@_Term"] || ann.Term; + if (term === "com.filemaker.odata.FieldID") { + fieldId = ann["@_String"] || ann.String || ""; + } else if (term === "com.filemaker.odata.Calculation") { + isCalculation = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.Global") { + isGlobal = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.AutoGenerated") { + isAutoGenerated = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.Index") { + hasIndex = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "com.filemaker.odata.VersionID") { + isVersionId = ann["@_Bool"] === "true" || ann.Bool === "true"; + } else if (term === "Org.OData.Core.V1.Permissions") { + const enumMember = ann.EnumMember; + if (enumMember) { + permissions = + typeof enumMember === "string" + ? 
enumMember + : enumMember["#text"] || undefined; + } + } + } + + properties.set(propName, { + $Type: propType, + $Nullable: !isExplicitlyNotNullable, // true if not explicitly set to false + "@FieldID": fieldId, + "@Calculation": isCalculation, + "@Global": isGlobal, + "@Org.OData.Core.V1.Permissions": permissions, + $DefaultValue: defaultValue, + "@AutoGenerated": isAutoGenerated, + "@Index": hasIndex, + "@VersionID": isVersionId, + }); + } + + // Extract NavigationProperties + const navigationProperties: NavigationProperty[] = []; + if (entityTypeEl.NavigationProperty) { + const navPropList = ensureArray(entityTypeEl.NavigationProperty); + for (const navPropEl of navPropList) { + const navName = navPropEl["@_Name"] || navPropEl.Name; + const navType = navPropEl["@_Type"] || navPropEl.Type; + if (navName && navType) { + navigationProperties.push({ + Name: navName, + Type: navType, + }); } } } + + entityTypes.set(entityTypeName, { + Name: entityTypeName, + "@TableID": tableId, + $Key: keyFields, + Properties: properties, + NavigationProperties: navigationProperties, + }); + } + + // Extract EntitySets from EntityContainer + const entityContainer = ensureArray(schema.EntityContainer)[0]; + if (entityContainer) { + const entitySetList = ensureArray(entityContainer.EntitySet); + for (const entitySetEl of entitySetList) { + const setName = entitySetEl["@_Name"] || entitySetEl.Name; + const entityType = entitySetEl["@_EntityType"] || entitySetEl.EntityType; + if (setName && entityType) { + // Extract just the entity type name from the full type string + // e.g., "com.filemaker.odata.WebData.fmp12.Addresses_" -> "Addresses_" + const typeNameMatch = entityType.match(/\.([^.]+)$/); + const entityTypeName = typeNameMatch ? 
typeNameMatch[1] : entityType; + + entitySets.set(setName, { + Name: setName, + EntityType: entityTypeName, + }); + } + } + } + + return { entityTypes, entitySets, namespace }; +} + +function generateImports( + usedFieldBuilders: Set, + needsZod: boolean, +): string { + const fieldBuilderImports: string[] = []; + + // Always need fmTableOccurrence + fieldBuilderImports.push("fmTableOccurrence"); + + // Add only the field builders that are actually used + if (usedFieldBuilders.has("textField")) { + fieldBuilderImports.push("textField"); } + if (usedFieldBuilders.has("numberField")) { + fieldBuilderImports.push("numberField"); + } + if (usedFieldBuilders.has("dateField")) { + fieldBuilderImports.push("dateField"); + } + if (usedFieldBuilders.has("timestampField")) { + fieldBuilderImports.push("timestampField"); + } + if (usedFieldBuilders.has("containerField")) { + fieldBuilderImports.push("containerField"); + } + + const imports = [ + `import { ${fieldBuilderImports.join(", ")} } from "@proofkit/fmodata"`, + ]; + + if (needsZod) { + imports.push(`import { z } from "zod/v4"`); + } + + return imports.join(";\n") + ";\n"; +} + +function sanitizeFileName(name: string): string { + // Convert to a safe filename + return name.replace(/[^a-zA-Z0-9_]/g, "_"); +} + +async function generateFromMetadata( + inputPath: string, + outputFolder: string, +): Promise { + console.log(`Reading metadata from: ${inputPath}`); + + // Read and parse the metadata XML + const xmlContent = await readFile(inputPath, "utf-8"); + const { entityTypes, entitySets, namespace } = + await parseXMLMetadata(xmlContent); // Build a map from entity type name to entity set name const entityTypeToSetMap = new Map(); - for (const [entitySetName, entityTypeName] of Object.entries(entitySets)) { - entityTypeToSetMap.set(entityTypeName, entitySetName); + for (const [entitySetName, entitySet] of entitySets.entries()) { + entityTypeToSetMap.set(entitySet.EntityType, entitySetName); } // Generate table occurrences 
for entity sets const generatedTOs: GeneratedTO[] = []; - console.log(`\nFound ${Object.keys(entitySets).length} entity sets:`); - for (const [entitySetName, entityTypeName] of Object.entries(entitySets)) { - const entityType = entityTypes[entityTypeName]; + console.log(`\nFound ${entitySets.size} entity sets:`); + for (const [entitySetName, entitySet] of entitySets.entries()) { + const entityType = entityTypes.get(entitySet.EntityType); if (entityType) { const generated = generateTableOccurrence( entitySetName, entityType, - namespace, entityTypeToSetMap, ); @@ -298,77 +510,76 @@ async function generateFromMetadata( generated.navigation.length > 0 ? ` [nav: ${generated.navigation.join(", ")}]` : ""; - console.log(` - ${entitySetName} (${entityTypeName})${navInfo}`); + console.log(` - ${entitySetName} (${entitySet.EntityType})${navInfo}`); generatedTOs.push(generated); } } - // Generate the base TO definitions (Phase 1) - const baseDefinitions = generatedTOs.map((to) => to.code).join("\n\n"); + // Create output directory + console.log(`\nCreating output directory: ${outputFolder}`); + await mkdir(outputFolder, { recursive: true }); - // Generate the buildOccurrences call (Phase 2) - const varNames = generatedTOs.map((to) => to.varName); - const baseVarNames = varNames.map((v) => `_${v}`); + // Generate one file per table occurrence + const exportStatements: string[] = []; - // Build navigation config - const navConfigLines: string[] = []; - for (const to of generatedTOs) { - if (to.navigation.length > 0) { - const navTargets = to.navigation.map((n) => JSON.stringify(n)).join(", "); - navConfigLines.push( - ` ${JSON.stringify(to.varName)}: [${navTargets}],`, - ); - } - } + for (const generated of generatedTOs) { + const fileName = `${sanitizeFileName(generated.varName)}.ts`; + const filePath = join(outputFolder, fileName); - const navConfigSection = - navConfigLines.length > 0 - ? 
`\n navigation: {\n${navConfigLines.join("\n")}\n },` - : ""; - - // Generate the complete TypeScript file - const output = `import { defineBaseTable, defineTableOccurrence, buildOccurrences } from "@proofkit/fmodata"; -import { z } from "zod/v4"; + // Generate imports based on what's actually used in this file + const imports = generateImports( + generated.usedFieldBuilders, + generated.needsZod, + ); + const fileContent = `${imports} // ============================================================================ -// Phase 1: Define all TableOccurrences (without navigation) +// Table Occurrence: ${generated.varName} // ============================================================================ -${baseDefinitions} +${generated.code} +`; -// ============================================================================ -// Phase 2: Build final TableOccurrences with navigation relationships -// ============================================================================ + await writeFile(filePath, fileContent, "utf-8"); + console.log(` ✓ Generated ${fileName}`); -export const [${varNames.join(", ")}] = buildOccurrences({ - occurrences: [${baseVarNames.join(", ")}],${navConfigSection} -}); + // Collect export statement for index file + exportStatements.push( + `export { ${generated.varName} } from "./${sanitizeFileName(generated.varName)}";`, + ); + } -// Export as array for use with database() -export const occurrences = [${varNames.join(", ")}]; + // Generate index.ts file that exports all table occurrences + const indexContent = `// ============================================================================ +// Auto-generated index file - exports all table occurrences +// ============================================================================ + +${exportStatements.join("\n")} `; - console.log(`Writing output to: ${outputPath}`); - await writeFile(outputPath, output, "utf-8"); - console.log("✓ Generation complete!"); + const indexPath = join(outputFolder, 
"index.ts"); + await writeFile(indexPath, indexContent, "utf-8"); + console.log(` ✓ Generated index.ts`); + + console.log( + `\n✓ Generation complete! Generated ${generatedTOs.length} table occurrence files.`, + ); } // Main execution const args = process.argv.slice(2); if (args.length < 2) { - console.error("Usage: bun typegen-starter.ts "); - console.error( - "\nExample: bun typegen-starter.ts sample-metadata.json sample-occurances.ts", - ); + console.error("Usage: bun typegen-starter.ts "); + console.error("\nExample: bun typegen-starter.ts metadata.xml output/"); process.exit(1); } const inputPath = resolve(args[0]); -const outputPath = resolve(args[1]); +const outputFolder = resolve(args[1]); -generateFromMetadata(inputPath, outputPath).catch((error) => { +generateFromMetadata(inputPath, outputFolder).catch((error) => { console.error("Error:", error); process.exit(1); }); diff --git a/packages/fmodata/src/client/base-table.ts b/packages/fmodata/src/client/base-table.ts deleted file mode 100644 index 53a73cd5..00000000 --- a/packages/fmodata/src/client/base-table.ts +++ /dev/null @@ -1,178 +0,0 @@ -import { StandardSchemaV1 } from "@standard-schema/spec"; - -/** - * BaseTable defines the schema and configuration for a table. - * Use `defineBaseTable()` to create instances with proper type inference. 
- * - * @template Schema - Record of field names to StandardSchemaV1 validators - * @template IdField - The name of the primary key field (optional, automatically read-only) - * @template Required - Additional field names to require on insert (beyond auto-inferred required fields) - * @template ReadOnly - Field names that cannot be modified via insert/update (idField is automatically read-only) - * - * @example Basic table with auto-inferred required fields - * ```ts - * import { z } from "zod"; - * import { defineBaseTable } from "@proofkit/fmodata"; - * - * const usersTable = defineBaseTable({ - * schema: { - * id: z.string(), // Auto-required (not nullable), auto-readOnly (idField) - * name: z.string(), // Auto-required (not nullable) - * email: z.string().nullable(), // Optional (nullable) - * }, - * idField: "id", - * }); - * // On insert: name is required, email is optional (id is excluded - readOnly) - * // On update: name and email available (id is excluded - readOnly) - * ``` - * - * @example Table with additional required and readOnly fields - * ```ts - * import { z } from "zod"; - * import { defineBaseTable } from "@proofkit/fmodata"; - * - * const usersTable = defineBaseTable({ - * schema: { - * id: z.string(), // Auto-required, auto-readOnly (idField) - * createdAt: z.string(), // Read-only system field - * name: z.string(), // Auto-required - * email: z.string().nullable(), // Optional by default... - * legacyField: z.string().nullable(), // Optional by default... 
- * }, - * idField: "id", - * required: ["legacyField"], // Make legacyField required for new inserts - * readOnly: ["createdAt"], // Exclude from insert/update - * }); - * // On insert: name and legacyField required; email optional (id and createdAt excluded) - * // On update: all fields optional (id and createdAt excluded) - * ``` - * - * @example Table with multiple read-only fields - * ```ts - * import { z } from "zod"; - * import { defineBaseTable } from "@proofkit/fmodata"; - * - * const usersTable = defineBaseTable({ - * schema: { - * id: z.string(), - * createdAt: z.string(), - * modifiedAt: z.string(), - * createdBy: z.string(), - * notes: z.string().nullable(), - * }, - * idField: "id", - * readOnly: ["createdAt", "modifiedAt", "createdBy"], - * }); - * // On insert/update: only notes is available (id and system fields excluded) - * ``` - */ -export class BaseTable< - Schema extends Record = any, - IdField extends keyof Schema | undefined = undefined, - Required extends readonly (keyof Schema | (string & {}))[] = readonly [], - ReadOnly extends readonly (keyof Schema | (string & {}))[] = readonly [], -> { - public readonly schema: Schema; - public readonly idField?: IdField; - public readonly required?: Required; - public readonly readOnly?: ReadOnly; - public readonly fmfIds?: Record< - keyof Schema | (string & {}), - `FMFID:${string}` - >; - - constructor(config: { - schema: Schema; - idField?: IdField; - required?: Required; - readOnly?: ReadOnly; - fmfIds?: Record; - }) { - this.schema = config.schema; - this.idField = config.idField; - this.required = config.required; - this.readOnly = config.readOnly; - this.fmfIds = config.fmfIds as - | Record - | undefined; - } - - /** - * Returns the FileMaker field ID (FMFID) for a given field name, or the field name itself if not using IDs. 
- * @param fieldName - The field name to get the ID for - * @returns The FMFID string or the original field name - */ - getFieldId(fieldName: keyof Schema): string { - if (this.fmfIds && fieldName in this.fmfIds) { - return this.fmfIds[fieldName]; - } - return String(fieldName); - } - - /** - * Returns the field name for a given FileMaker field ID (FMFID), or the ID itself if not found. - * @param fieldId - The FMFID to get the field name for - * @returns The field name or the original ID - */ - getFieldName(fieldId: string): string { - if (this.fmfIds) { - // Search for the field name that corresponds to this FMFID - for (const [fieldName, fmfId] of Object.entries(this.fmfIds)) { - if (fmfId === fieldId) { - return fieldName; - } - } - } - return fieldId; - } - - /** - * Returns true if this BaseTable is using FileMaker field IDs. - */ - isUsingFieldIds(): boolean { - return this.fmfIds !== undefined; - } -} - -/** - * Creates a BaseTable with proper TypeScript type inference. - * - * Use this function to create BaseTable instances with full type safety. 
- * - * @example Without entity IDs - * ```ts - * const users = defineBaseTable({ - * schema: { id: z.string(), name: z.string() }, - * idField: "id", - * }); - * ``` - * - * @example With entity IDs (FileMaker field IDs) - * ```ts - * const products = defineBaseTable({ - * schema: { id: z.string(), name: z.string() }, - * idField: "id", - * fmfIds: { id: "FMFID:1", name: "FMFID:2" }, - * }); - * ``` - */ -export function defineBaseTable< - const Schema extends Record, - IdField extends keyof Schema | undefined = undefined, - const Required extends readonly ( - | keyof Schema - | (string & {}) - )[] = readonly [], - const ReadOnly extends readonly ( - | keyof Schema - | (string & {}) - )[] = readonly [], ->(config: { - schema: Schema; - idField?: IdField; - required?: Required; - readOnly?: ReadOnly; - fmfIds?: { [K in keyof Schema | (string & {})]: `FMFID:${string}` }; -}): BaseTable { - return new BaseTable(config); -} diff --git a/packages/fmodata/src/client/batch-builder.ts b/packages/fmodata/src/client/batch-builder.ts index 33741a16..8446f9bc 100644 --- a/packages/fmodata/src/client/batch-builder.ts +++ b/packages/fmodata/src/client/batch-builder.ts @@ -5,6 +5,7 @@ import type { ExecuteOptions, BatchResult, BatchItemResult, + ExecuteMethodOptions, } from "../types"; import { BatchTruncatedError } from "../errors"; import { type FFetchOptions } from "@fetchkit/ffetch"; @@ -148,7 +149,7 @@ export class BatchBuilder[]> { * @returns A BatchResult containing individual results for each operation */ async execute( - options?: RequestInit & FFetchOptions & EO, + options?: ExecuteMethodOptions, ): Promise>> { const baseUrl = this.context._getBaseUrl?.(); if (!baseUrl) { diff --git a/packages/fmodata/src/client/build-occurrences.ts b/packages/fmodata/src/client/build-occurrences.ts deleted file mode 100644 index 9263a716..00000000 --- a/packages/fmodata/src/client/build-occurrences.ts +++ /dev/null @@ -1,155 +0,0 @@ -import { TableOccurrence } from 
"./table-occurrence"; - -/** - * Extract the name type from a TableOccurrence - */ -type ExtractName = - TO extends TableOccurrence ? Name : never; - -/** - * Extract all names from an array of TableOccurrences as a union - */ -type ExtractNames[]> = - ExtractName; - -/** - * Find a TableOccurrence by name from an array - */ -type FindByName< - TOs extends readonly TableOccurrence[], - Name extends string, -> = Extract>; - -/** - * Navigation configuration - maps TO names to arrays of navigation target names. - * A table occurrence cannot navigate to itself. - */ -type NavigationConfig< - TOs extends readonly TableOccurrence[], -> = { - [K in ExtractNames]?: Exclude, K>[]; -}; - -/** - * Resolve navigation config to actual TO record for a given TO name - */ -type ResolveNavForTO< - TOs extends readonly TableOccurrence[], - Nav extends NavigationConfig | undefined, - Name extends ExtractNames, -> = - Nav extends NavigationConfig - ? Nav[Name] extends infer NavNames extends string[] - ? { - [K in NavNames[number]]: FindByName; - } - : {} - : {}; - -/** - * Build the result type - a tuple of TOs with navigation resolved - */ -type BuildResult< - TOs extends readonly TableOccurrence[], - Nav extends NavigationConfig | undefined, -> = { - [K in keyof TOs]: TOs[K] extends TableOccurrence< - infer BT, - infer Name, - any, - infer DefSelect - > - ? Name extends ExtractNames - ? TableOccurrence, DefSelect> - : TOs[K] - : TOs[K]; -}; - -/** - * Configuration object for buildOccurrences - */ -type BuildOccurrencesConfig< - TOs extends readonly TableOccurrence[], -> = { - occurrences: TOs; - navigation?: NavigationConfig; -}; - -/** - * Builds TableOccurrences with navigation relationships resolved. - * - * This is the second phase of TO definition - after defining base TOs, - * use this function to link them with navigation relationships. 
- * - * @example Full navigation - * ```ts - * const [contacts, users] = buildOccurrences({ - * occurrences: [_contacts, _users], - * navigation: { - * contacts: ["users"], - * users: ["contacts"], - * }, - * }); - * ``` - * - * @example Partial navigation - * ```ts - * const [contacts, users] = buildOccurrences({ - * occurrences: [_contacts, _users], - * navigation: { - * contacts: ["users"], - * }, - * }); - * ``` - * - * @example No navigation - * ```ts - * const [contacts, users] = buildOccurrences({ - * occurrences: [_contacts, _users], - * }); - * ``` - * - * @param config - Configuration object with occurrences array and optional navigation - * @returns Tuple of TableOccurrences with navigation resolved (same order as input) - */ -export function buildOccurrences< - const TOs extends readonly TableOccurrence[], - const Nav extends NavigationConfig | undefined, ->(config: { occurrences: TOs; navigation?: Nav }): BuildResult { - const { occurrences, navigation } = config; - - // Build a map of name -> TO for quick lookup - const toByName = new Map>(); - for (const to of occurrences) { - toByName.set(to.name, to); - } - - // Build result array with navigation resolved - const result = occurrences.map((to) => { - const navNames = navigation?.[to.name as keyof typeof navigation] as - | string[] - | undefined; - - // Resolve navigation names to actual TOs - const resolvedNav: Record> = {}; - if (navNames) { - for (const navName of navNames) { - // Prevent self-navigation - if (navName === to.name) { - throw new Error( - `TableOccurrence "${to.name}" cannot navigate to itself`, - ); - } - const targetTO = toByName.get(navName); - if (targetTO) { - resolvedNav[navName] = targetTO; - } - } - } - - // Create new TO with navigation using internal method - return TableOccurrence._withNavigation(to, resolvedNav); - }); - - return result as BuildResult; -} diff --git a/packages/fmodata/src/client/builders/default-select.ts 
b/packages/fmodata/src/client/builders/default-select.ts new file mode 100644 index 00000000..0256db05 --- /dev/null +++ b/packages/fmodata/src/client/builders/default-select.ts @@ -0,0 +1,69 @@ +import type { FMTable } from "../../orm/table"; +import { FMTable as FMTableClass } from "../../orm/table"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { getBaseTableConfig } from "../../orm/table"; +import { isColumn } from "../../orm/column"; + +/** + * Helper function to get container field names from a table. + * Container fields cannot be selected via $select in FileMaker OData API. + */ +function getContainerFieldNames(table: FMTable): string[] { + const baseTableConfig = getBaseTableConfig(table); + if (!baseTableConfig || !baseTableConfig.containerFields) { + return []; + } + return baseTableConfig.containerFields as string[]; +} + +/** + * Gets default select fields from a table definition. + * Returns undefined if defaultSelect is "all". + * Automatically filters out container fields since they cannot be selected via $select. 
+ */ +export function getDefaultSelectFields( + table: FMTable | undefined, +): string[] | undefined { + if (!table) return undefined; + + const defaultSelect = (table as any)[FMTableClass.Symbol.DefaultSelect]; + const containerFields = getContainerFieldNames(table); + + if (defaultSelect === "schema") { + const baseTableConfig = getBaseTableConfig(table); + const allFields = Object.keys(baseTableConfig.schema); + // Filter out container fields + return [...new Set(allFields.filter((f) => !containerFields.includes(f)))]; + } + + if (Array.isArray(defaultSelect)) { + // Filter out container fields + return [ + ...new Set(defaultSelect.filter((f) => !containerFields.includes(f))), + ]; + } + + // Check if defaultSelect is a Record (resolved from function) + if ( + typeof defaultSelect === "object" && + defaultSelect !== null && + !Array.isArray(defaultSelect) + ) { + // Extract field names from Column instances + const fieldNames: string[] = []; + for (const value of Object.values(defaultSelect)) { + if (isColumn(value)) { + fieldNames.push(value.fieldName); + } + } + if (fieldNames.length > 0) { + // Filter out container fields + return [ + ...new Set(fieldNames.filter((f) => !containerFields.includes(f))), + ]; + } + } + + // defaultSelect is "all" or undefined + return undefined; +} diff --git a/packages/fmodata/src/client/builders/expand-builder.ts b/packages/fmodata/src/client/builders/expand-builder.ts new file mode 100644 index 00000000..89d5ae20 --- /dev/null +++ b/packages/fmodata/src/client/builders/expand-builder.ts @@ -0,0 +1,245 @@ +import { QueryOptions } from "odata-query"; +import buildQuery from "odata-query"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { FMTable } from "../../orm/table"; +import { + getBaseTableConfig, + getTableName, + getNavigationPaths, +} from "../../orm/table"; +import type { ExpandValidationConfig } from "../../validation"; +import type { ExpandConfig } from "./shared-types"; +import { 
formatSelectFields } from "./select-utils"; +import { getDefaultSelectFields } from "./default-select"; +import { InternalLogger } from "../../logger"; + +/** + * Builds OData expand query strings and validation configs. + * Handles nested expands recursively and transforms relation names to FMTIDs + * when using entity IDs. + */ +export class ExpandBuilder { + constructor( + private useEntityIds: boolean, + private logger: InternalLogger, + ) {} + + /** + * Builds OData $expand query string from expand configurations. + */ + buildExpandString(configs: ExpandConfig[]): string { + if (configs.length === 0) return ""; + + return configs.map((config) => this.buildSingleExpand(config)).join(","); + } + + /** + * Builds validation configs for expanded navigation properties. + */ + buildValidationConfigs(configs: ExpandConfig[]): ExpandValidationConfig[] { + return configs.map((config) => { + const targetTable = config.targetTable; + + let targetSchema: Record | undefined; + if (targetTable) { + const baseTableConfig = getBaseTableConfig(targetTable); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + const schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + + targetSchema = schema; + } + + const selectedFields = config.options?.select + ? Array.isArray(config.options.select) + ? config.options.select.map(String) + : [String(config.options.select)] + : undefined; + + // Recursively build validation configs for nested expands + const nestedExpands = config.nestedExpandConfigs + ? this.buildValidationConfigs(config.nestedExpandConfigs) + : undefined; + + return { + relation: config.relation, + targetSchema, + targetTable, + table: targetTable, + selectedFields, + nestedExpands, + }; + }); + } + + /** + * Process an expand() call and return the expand config. 
+ * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param targetTable - The target table to expand to + * @param sourceTable - The source table (for validation) + * @param callback - Optional callback to configure the expand query + * @param builderFactory - Function that creates a QueryBuilder for the target table + * @returns ExpandConfig to add to the builder's expandConfigs array + */ + processExpand, Builder = any>( + targetTable: TargetTable, + sourceTable: FMTable | undefined, + callback?: (builder: Builder) => Builder, + builderFactory?: () => Builder, + ): ExpandConfig { + // Extract name and validate + const relationName = getTableName(targetTable); + + // Runtime validation: Check if relation name is in navigationPaths + if (sourceTable) { + const navigationPaths = getNavigationPaths(sourceTable); + if (navigationPaths && !navigationPaths.includes(relationName)) { + this.logger.warn( + `Cannot expand to "${relationName}". Valid navigation paths: ${navigationPaths.length > 0 ? 
navigationPaths.join(", ") : "none"}`, + ); + } + } + + if (callback && builderFactory) { + // Create a new QueryBuilder for the target table + const targetBuilder = builderFactory(); + + // Pass to callback and get configured builder + const configuredBuilder = callback(targetBuilder); + + // Extract the builder's query options + const expandOptions: Partial> = { + ...(configuredBuilder as any).queryOptions, + }; + + // If callback didn't provide select, apply defaultSelect from target table + if (!expandOptions.select) { + const defaultFields = getDefaultSelectFields(targetTable); + if (defaultFields) { + expandOptions.select = defaultFields; + } + } + + // If the configured builder has nested expands, we need to include them + const nestedExpandConfigs = (configuredBuilder as any).expandConfigs; + if (nestedExpandConfigs?.length > 0) { + // Build nested expand string from the configured builder's expand configs + const nestedExpandString = this.buildExpandString(nestedExpandConfigs); + if (nestedExpandString) { + // Add nested expand to options + expandOptions.expand = nestedExpandString as any; + } + } + + return { + relation: relationName, + options: expandOptions, + targetTable, + nestedExpandConfigs: nestedExpandConfigs?.length > 0 ? nestedExpandConfigs : undefined, + }; + } else { + // Simple expand without callback - apply defaultSelect if available + const defaultFields = getDefaultSelectFields(targetTable); + if (defaultFields) { + return { + relation: relationName, + options: { select: defaultFields }, + targetTable, + }; + } else { + return { + relation: relationName, + targetTable, + }; + } + } + } + + /** + * Builds a single expand string with its options. 
+ */ + private buildSingleExpand(config: ExpandConfig): string { + const relationName = this.resolveRelationName(config); + const parts = this.buildExpandParts(config); + + if (parts.length === 0) { + return relationName; + } + + return `${relationName}(${parts.join(";")})`; + } + + /** + * Resolves relation name, using FMTID if entity IDs are enabled. + */ + private resolveRelationName(config: ExpandConfig): string { + if (!this.useEntityIds) { + return config.relation; + } + + const targetTable = config.targetTable; + if (targetTable && FMTable.Symbol.EntityId in targetTable) { + const tableId = (targetTable as any)[FMTable.Symbol.EntityId] as + | `FMTID:${string}` + | undefined; + if (tableId) { + return tableId; + } + } + + return config.relation; + } + + /** + * Builds expand parts (select, filter, orderBy, etc.) for a single expand. + */ + private buildExpandParts(config: ExpandConfig): string[] { + if (!config.options || Object.keys(config.options).length === 0) { + return []; + } + + const parts: string[] = []; + const opts = config.options; + + if (opts.select) { + const selectArray = Array.isArray(opts.select) + ? opts.select.map(String) + : [String(opts.select)]; + const selectFields = formatSelectFields( + selectArray, + config.targetTable, + this.useEntityIds, + ); + parts.push(`$select=${selectFields}`); + } + + if (opts.filter) { + const filterQuery = buildQuery({ filter: opts.filter }); + const match = filterQuery.match(/\$filter=([^&]+)/); + if (match) parts.push(`$filter=${match[1]}`); + } + + if (opts.orderBy) { + const orderByValue = Array.isArray(opts.orderBy) + ? 
opts.orderBy.join(",") + : String(opts.orderBy); + parts.push(`$orderby=${orderByValue}`); + } + + if (opts.top !== undefined) parts.push(`$top=${opts.top}`); + if (opts.skip !== undefined) parts.push(`$skip=${opts.skip}`); + + if (opts.expand) { + if (typeof opts.expand === "string") { + parts.push(`$expand=${opts.expand}`); + } + } + + return parts; + } +} diff --git a/packages/fmodata/src/client/builders/index.ts b/packages/fmodata/src/client/builders/index.ts new file mode 100644 index 00000000..385ad688 --- /dev/null +++ b/packages/fmodata/src/client/builders/index.ts @@ -0,0 +1,11 @@ +// Re-export all shared builder utilities +export * from "./shared-types"; +export * from "./table-utils"; +export * from "./select-utils"; +export * from "./select-mixin"; +export * from "./expand-builder"; +export * from "./response-processor"; +export * from "./default-select"; +export * from "./query-string-builder"; + + diff --git a/packages/fmodata/src/client/builders/query-string-builder.ts b/packages/fmodata/src/client/builders/query-string-builder.ts new file mode 100644 index 00000000..a9fb68df --- /dev/null +++ b/packages/fmodata/src/client/builders/query-string-builder.ts @@ -0,0 +1,43 @@ +import type { FMTable } from "../../orm/table"; +import { ExpandBuilder } from "./expand-builder"; +import type { ExpandConfig } from "./shared-types"; +import { formatSelectFields } from "./select-utils"; +import { InternalLogger } from "../../logger"; + +/** + * Builds OData query string for $select and $expand parameters. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param config - Configuration object + * @returns Query string starting with ? 
or empty string if no parameters + */ +export function buildSelectExpandQueryString(config: { + selectedFields?: string[]; + expandConfigs: ExpandConfig[]; + table?: FMTable; + useEntityIds: boolean; + logger: InternalLogger; +}): string { + const parts: string[] = []; + const expandBuilder = new ExpandBuilder(config.useEntityIds, config.logger); + + // Build $select + if (config.selectedFields && config.selectedFields.length > 0) { + const selectString = formatSelectFields( + config.selectedFields, + config.table, + config.useEntityIds, + ); + if (selectString) { + parts.push(`$select=${selectString}`); + } + } + + // Build $expand + const expandString = expandBuilder.buildExpandString(config.expandConfigs); + if (expandString) { + parts.push(`$expand=${expandString}`); + } + + return parts.length > 0 ? `?${parts.join("&")}` : ""; +} diff --git a/packages/fmodata/src/client/builders/response-processor.ts b/packages/fmodata/src/client/builders/response-processor.ts new file mode 100644 index 00000000..783b1a72 --- /dev/null +++ b/packages/fmodata/src/client/builders/response-processor.ts @@ -0,0 +1,276 @@ +import type { FMTable } from "../../orm/table"; +import type { Result } from "../../types"; +import type { ExpandValidationConfig } from "../../validation"; +import { validateSingleResponse, validateListResponse } from "../../validation"; +import { transformResponseFields } from "../../transform"; +import { RecordCountMismatchError } from "../../errors"; +import { getBaseTableConfig } from "../../orm/table"; +import { ExpandBuilder } from "./expand-builder"; +import type { ExpandConfig } from "./shared-types"; +import { InternalLogger } from "../../logger"; + +export interface ProcessResponseConfig { + table?: FMTable; + schema?: Record; + singleMode: "exact" | "maybe" | false; + selectedFields?: string[]; + expandValidationConfigs?: ExpandValidationConfig[]; + skipValidation?: boolean; + useEntityIds?: boolean; + // Mapping from field names to output keys (for 
renamed fields in select) + fieldMapping?: Record; +} + +/** + * Processes OData response with transformation and validation. + * Shared by QueryBuilder and RecordBuilder. + */ +export async function processODataResponse( + rawResponse: any, + config: ProcessResponseConfig, +): Promise> { + const { + table, + schema, + singleMode, + selectedFields, + expandValidationConfigs, + skipValidation, + useEntityIds, + fieldMapping, + } = config; + + // Transform field IDs back to names if using entity IDs + let response = rawResponse; + if (table && useEntityIds) { + response = transformResponseFields( + response, + table, + expandValidationConfigs, + ); + } + + // Fast path: skip validation + if (skipValidation) { + const result = extractRecords(response, singleMode); + // Rename fields AFTER extraction (but before returning) + if (result.data && fieldMapping && Object.keys(fieldMapping).length > 0) { + if (result.error) { + return { data: undefined, error: result.error } as Result; + } + return { + data: renameFieldsInResponse(result.data, fieldMapping) as T, + error: undefined, + }; + } + return result as Result; + } + + // Validation path + if (singleMode !== false) { + const validation = await validateSingleResponse( + response, + schema, + selectedFields as any, + expandValidationConfigs, + singleMode, + ); + + if (!validation.valid) { + return { data: undefined, error: validation.error }; + } + + // Rename fields AFTER validation completes + if (fieldMapping && Object.keys(fieldMapping).length > 0) { + return { + data: renameFieldsInResponse(validation.data, fieldMapping) as T, + error: undefined, + }; + } + + return { data: validation.data as T, error: undefined }; + } + + const validation = await validateListResponse( + response, + schema, + selectedFields as any, + expandValidationConfigs, + ); + + if (!validation.valid) { + return { data: undefined, error: validation.error }; + } + + // Rename fields AFTER validation completes + if (fieldMapping && 
Object.keys(fieldMapping).length > 0) { + return { + data: renameFieldsInResponse(validation.data, fieldMapping) as T, + error: undefined, + }; + } + + return { data: validation.data as T, error: undefined }; +} + +/** + * Extracts records from response without validation. + */ +function extractRecords( + response: any, + singleMode: "exact" | "maybe" | false, +): Result { + if (singleMode === false) { + const records = response.value ?? []; + return { data: records as T, error: undefined }; + } + + const records = response.value ?? [response]; + const count = Array.isArray(records) ? records.length : 1; + + if (count > 1) { + return { + data: undefined, + error: new RecordCountMismatchError( + singleMode === "exact" ? "one" : "at-most-one", + count, + ), + }; + } + + if (count === 0) { + if (singleMode === "exact") { + return { data: undefined, error: new RecordCountMismatchError("one", 0) }; + } + return { data: null as T, error: undefined }; + } + + const record = Array.isArray(records) ? records[0] : records; + return { data: record as T, error: undefined }; +} + +/** + * Gets schema from a table occurrence, excluding container fields. + * Container fields are never returned in regular responses (only via getSingleField). + */ +export function getSchemaFromTable( + table: FMTable | undefined, +): Record | undefined { + if (!table) return undefined; + const baseTableConfig = getBaseTableConfig(table); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + const schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + + return schema; +} + +/** + * Renames fields in response data according to the field mapping. + * Used when select() is called with renamed fields (e.g., { userEmail: users.email }). 
+ */ +function renameFieldsInResponse( + data: any, + fieldMapping: Record, +): any { + if (!data || typeof data !== "object") { + return data; + } + + // Handle array responses + if (Array.isArray(data)) { + return data.map((item) => renameFieldsInResponse(item, fieldMapping)); + } + + // Handle OData list response structure + if ("value" in data && Array.isArray(data.value)) { + return { + ...data, + value: data.value.map((item: any) => + renameFieldsInResponse(item, fieldMapping), + ), + }; + } + + // Handle single record + const renamed: Record = {}; + for (const [key, value] of Object.entries(data)) { + // Check if this field should be renamed + const outputKey = fieldMapping[key]; + if (outputKey) { + renamed[outputKey] = value; + } else { + renamed[key] = value; + } + } + return renamed; +} + +/** + * Processes query response with expand configs. + * This is a convenience wrapper that builds validation configs from expand configs. + */ +export async function processQueryResponse( + response: any, + config: { + occurrence?: FMTable; + singleMode: "exact" | "maybe" | false; + queryOptions: { select?: (keyof T)[] | string[] }; + expandConfigs: ExpandConfig[]; + skipValidation?: boolean; + useEntityIds?: boolean; + // Mapping from field names to output keys (for renamed fields in select) + fieldMapping?: Record; + logger: InternalLogger; + }, +): Promise> { + const { + occurrence, + singleMode, + queryOptions, + expandConfigs, + skipValidation, + useEntityIds, + fieldMapping, + logger, + } = config; + + const expandBuilder = new ExpandBuilder(useEntityIds ?? false, logger); + const expandValidationConfigs = + expandBuilder.buildValidationConfigs(expandConfigs); + + const selectedFields = queryOptions.select + ? Array.isArray(queryOptions.select) + ? 
queryOptions.select.map(String) + : [String(queryOptions.select)] + : undefined; + + // Process the response first + let processedResponse = await processODataResponse(response, { + table: occurrence, + schema: getSchemaFromTable(occurrence), + singleMode, + selectedFields, + expandValidationConfigs, + skipValidation, + useEntityIds, + }); + + // Rename fields if field mapping is provided (for renamed fields in select) + if ( + processedResponse.data && + fieldMapping && + Object.keys(fieldMapping).length > 0 + ) { + processedResponse = { + ...processedResponse, + data: renameFieldsInResponse(processedResponse.data, fieldMapping), + }; + } + + return processedResponse; +} diff --git a/packages/fmodata/src/client/builders/select-mixin.ts b/packages/fmodata/src/client/builders/select-mixin.ts new file mode 100644 index 00000000..0b491cb1 --- /dev/null +++ b/packages/fmodata/src/client/builders/select-mixin.ts @@ -0,0 +1,75 @@ +import { InternalLogger } from "../../logger"; +import { isColumn, type Column } from "../../orm/column"; + +/** + * Utility function for processing select() calls. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. + * + * @param fields - Field names or Column references + * @returns Object with selectedFields array + */ +export function processSelectFields( + ...fields: (string | Column)[] +): { selectedFields: string[] } { + const fieldNames = fields.map((field) => { + if (isColumn(field)) { + return field.fieldName as string; + } + return String(field); + }); + return { selectedFields: [...new Set(fieldNames)] }; +} + +/** + * Processes select() calls with field renaming support. + * Validates columns belong to the correct table and builds field mapping for renamed fields. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. 
+ * + * @param fields - Object mapping output keys to column references + * @param tableName - Expected table name for validation + * @returns Object with selectedFields array and fieldMapping for renamed fields + */ +export function processSelectWithRenames( + fields: Record>, + tableName: string, + logger: InternalLogger, +): { selectedFields: string[]; fieldMapping: Record } { + const selectedFields: string[] = []; + const fieldMapping: Record = {}; + + for (const [outputKey, column] of Object.entries(fields)) { + if (!isColumn(column)) { + throw new Error( + `select() expects column references, but got: ${typeof column}`, + ); + } + + // Warn (not throw) on table mismatch for consistency + if (column.tableName !== tableName) { + logger.warn( + `Column ${column.toString()} is from table "${column.tableName}", but query is for table "${tableName}"`, + ); + } + + const fieldName = column.fieldName; + selectedFields.push(fieldName); + + // Build mapping from field name to output key (only if renamed) + if (fieldName !== outputKey) { + fieldMapping[fieldName] = outputKey; + } + } + + return { + selectedFields, + fieldMapping: Object.keys(fieldMapping).length > 0 ? fieldMapping : {}, + }; +} + +/** + * Legacy class name for backward compatibility. + * @deprecated Use processSelectFields function instead + */ +export class SelectMixin { + static processSelect = processSelectFields; +} diff --git a/packages/fmodata/src/client/builders/select-utils.ts b/packages/fmodata/src/client/builders/select-utils.ts new file mode 100644 index 00000000..360cf025 --- /dev/null +++ b/packages/fmodata/src/client/builders/select-utils.ts @@ -0,0 +1,56 @@ +import type { FMTable } from "../../orm/table"; +import { transformFieldNamesArray } from "../../transform"; + +/** + * Determines if a field name needs to be quoted in OData queries. + * Per FileMaker docs: field names with special characters (spaces, underscores, etc.) must be quoted. 
+ * Also quotes "id" as it's an OData reserved word. + * Entity IDs (FMFID:*, FMTID:*) are not quoted as they're identifiers, not field names. + * + * @param fieldName - The field name or identifier to check + * @returns true if the field name should be quoted in OData queries + */ +export function needsFieldQuoting(fieldName: string): boolean { + // Entity IDs are identifiers and don't need quoting + if (fieldName.startsWith("FMFID:") || fieldName.startsWith("FMTID:")) { + return false; + } + // Always quote "id" as it's an OData reserved word + if (fieldName === "id") return true; + // Quote if field name contains spaces, underscores, or other special characters + return ( + fieldName.includes(" ") || + fieldName.includes("_") || + !/^[a-zA-Z][a-zA-Z0-9]*$/.test(fieldName) + ); +} + +/** + * Formats select fields for use in OData query strings. + * - Transforms field names to FMFIDs if using entity IDs + * - Wraps "id" fields in double quotes (OData reserved) + * - URL-encodes special characters but preserves spaces + */ +export function formatSelectFields( + select: string[] | readonly string[] | undefined, + table?: FMTable, + useEntityIds?: boolean, +): string { + if (!select || select.length === 0) return ""; + + const selectArray = Array.isArray(select) ? select : [select]; + + // Transform to field IDs if using entity IDs + const transformedFields = + table && useEntityIds + ? 
transformFieldNamesArray(selectArray.map(String), table) + : selectArray.map(String); + + return transformedFields + .map((field) => { + if (needsFieldQuoting(field)) return `"${field}"`; + const encoded = encodeURIComponent(field); + return encoded.replace(/%20/g, " "); + }) + .join(","); +} diff --git a/packages/fmodata/src/client/builders/shared-types.ts b/packages/fmodata/src/client/builders/shared-types.ts new file mode 100644 index 00000000..ffc5ed4e --- /dev/null +++ b/packages/fmodata/src/client/builders/shared-types.ts @@ -0,0 +1,42 @@ +import type { QueryOptions } from "odata-query"; +import type { ExecutionContext } from "../../types"; +import type { FMTable } from "../../orm/table"; + +/** + * Expand configuration used by both QueryBuilder and RecordBuilder + */ +export type ExpandConfig = { + relation: string; + options?: Partial>; + targetTable?: FMTable; + nestedExpandConfigs?: ExpandConfig[]; +}; + +/** + * Type to represent expanded relations in return types + */ +export type ExpandedRelations = Record; + +/** + * Navigation context shared between builders + */ +export interface NavigationContext { + isNavigate?: boolean; + navigateRecordId?: string | number; + navigateRelation?: string; + navigateSourceTableName?: string; + navigateBaseRelation?: string; + navigateBasePath?: string; +} + +/** + * Common builder configuration + */ +export interface BuilderConfig | undefined> { + occurrence?: Occ; + tableName: string; + databaseName: string; + context: ExecutionContext; + databaseUseEntityIds?: boolean; +} + diff --git a/packages/fmodata/src/client/builders/table-utils.ts b/packages/fmodata/src/client/builders/table-utils.ts new file mode 100644 index 00000000..91e1a11b --- /dev/null +++ b/packages/fmodata/src/client/builders/table-utils.ts @@ -0,0 +1,87 @@ +import type { ExecutionContext } from "../../types"; +import { getAcceptHeader } from "../../types"; +import type { FMTable } from "../../orm/table"; +import { + getTableName, + getTableId as 
getTableIdHelper, + isUsingEntityIds, +} from "../../orm/table"; +import type { FFetchOptions } from "@fetchkit/ffetch"; +import type { ExecuteOptions } from "../../types"; + +/** + * Resolves table identifier based on entity ID settings. + * Used by both QueryBuilder and RecordBuilder. + */ +export function resolveTableId( + table: FMTable | undefined, + fallbackTableName: string, + context: ExecutionContext, + useEntityIdsOverride?: boolean, +): string { + if (!table) { + return fallbackTableName; + } + + const contextDefault = context._getUseEntityIds?.() ?? false; + const shouldUseIds = useEntityIdsOverride ?? contextDefault; + + if (shouldUseIds) { + if (!isUsingEntityIds(table)) { + throw new Error( + `useEntityIds is true but table "${getTableName(table)}" does not have entity IDs configured`, + ); + } + return getTableIdHelper(table); + } + + return getTableName(table); +} + +/** + * Merges database-level useEntityIds with per-request options. + */ +export function mergeEntityIdOptions>( + options: T | undefined, + databaseDefault: boolean, +): T & { useEntityIds?: boolean } { + return { + ...options, + useEntityIds: (options as any)?.useEntityIds ?? databaseDefault, + } as T & { useEntityIds?: boolean }; +} + +/** + * Type-safe helper for merging execute options with entity ID settings + */ +export function mergeExecuteOptions( + options: (RequestInit & FFetchOptions & ExecuteOptions) | undefined, + databaseUseEntityIds: boolean, +): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + return mergeEntityIdOptions(options, databaseUseEntityIds); +} + +/** + * Creates an OData Request object with proper headers. + * Used by both QueryBuilder and RecordBuilder to eliminate duplication. 
+ * + * @param baseUrl - Base URL for the request + * @param config - Request configuration with method and url + * @param options - Optional execution options + * @returns Request object ready to use + */ +export function createODataRequest( + baseUrl: string, + config: { method: string; url: string }, + options?: { includeODataAnnotations?: boolean }, +): Request { + const fullUrl = `${baseUrl}${config.url}`; + + return new Request(fullUrl, { + method: config.method, + headers: { + "Content-Type": "application/json", + Accept: getAcceptHeader(options?.includeODataAnnotations), + }, + }); +} diff --git a/packages/fmodata/src/client/database.ts b/packages/fmodata/src/client/database.ts index 41b851e8..b4eba64d 100644 --- a/packages/fmodata/src/client/database.ts +++ b/packages/fmodata/src/client/database.ts @@ -1,50 +1,11 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"; import type { ExecutionContext, ExecutableBuilder, Metadata } from "../types"; -import type { BaseTable } from "./base-table"; -import type { TableOccurrence } from "./table-occurrence"; import { EntitySet } from "./entity-set"; import { BatchBuilder } from "./batch-builder"; import { SchemaManager } from "./schema-manager"; +import { FMTable } from "../orm/table"; -// Helper type to extract schema from a TableOccurrence -type ExtractSchemaFromOccurrence = - O extends TableOccurrence - ? BT extends BaseTable - ? S - : never - : never; - -// Helper type to find an occurrence by name in the occurrences tuple -type FindOccurrenceByName< - Occurrences extends readonly TableOccurrence[], - Name extends string, -> = Occurrences extends readonly [ - infer First, - ...infer Rest extends readonly TableOccurrence[], -] - ? First extends TableOccurrence - ? First["name"] extends Name - ? 
First - : FindOccurrenceByName - : never - : never; - -// Helper type to extract all occurrence names from the tuple -type ExtractOccurrenceNames< - Occurrences extends readonly TableOccurrence[], -> = Occurrences extends readonly [] - ? string // If no occurrences, allow any string - : Occurrences[number]["name"]; // Otherwise, extract union of names - -export class Database< - Occurrences extends readonly TableOccurrence< - any, - any, - any, - any - >[] = readonly [], -> { - private occurrenceMap: Map>; +export class Database { private _useEntityIds: boolean = false; public readonly schema: SchemaManager; @@ -52,7 +13,6 @@ export class Database< private readonly databaseName: string, private readonly context: ExecutionContext, config?: { - occurrences?: Occurrences | undefined; /** * Whether to use entity IDs instead of field names in the actual requests to the server * Defaults to true if all occurrences use entity IDs, false otherwise @@ -61,129 +21,28 @@ export class Database< useEntityIds?: boolean; }, ) { - this.occurrenceMap = new Map(); - if (config?.occurrences) { - // Validate consistency: either all occurrences use entity IDs or none do - const occurrencesWithIds: string[] = []; - const occurrencesWithoutIds: string[] = []; - - for (const occ of config.occurrences) { - this.occurrenceMap.set(occ.name, occ); - - const hasTableId = occ.isUsingTableId(); - const hasFieldIds = occ.baseTable.isUsingFieldIds(); - - // An occurrence uses entity IDs if it has both fmtId and fmfIds - if (hasTableId && hasFieldIds) { - occurrencesWithIds.push(occ.name); - } else if (!hasTableId && !hasFieldIds) { - occurrencesWithoutIds.push(occ.name); - } else { - // Partial entity ID usage (only one of fmtId or fmfIds) - this is an error - throw new Error( - `TableOccurrence "${occ.name}" has inconsistent entity ID configuration. ` + - `Both fmtId (${hasTableId ? "present" : "missing"}) and fmfIds (${hasFieldIds ? 
"present" : "missing"}) must be defined together.`, - ); - } - } - - // Determine default value: true if all occurrences use entity IDs, false otherwise - const allOccurrencesUseEntityIds = - occurrencesWithIds.length > 0 && occurrencesWithoutIds.length === 0; - const hasMixedUsage = - occurrencesWithIds.length > 0 && occurrencesWithoutIds.length > 0; - - // Handle explicit useEntityIds config - if (config.useEntityIds !== undefined) { - if (config.useEntityIds === false) { - // If explicitly set to false, allow mixed usage and use false - this._useEntityIds = false; - } else if (config.useEntityIds === true) { - // If explicitly set to true, validate that all occurrences use entity IDs - if (hasMixedUsage || occurrencesWithoutIds.length > 0) { - throw new Error( - `useEntityIds is set to true but some occurrences do not use entity IDs. ` + - `Occurrences without entity IDs: [${occurrencesWithoutIds.join(", ")}]. ` + - `Either set useEntityIds to false or configure all occurrences with entity IDs.`, - ); - } - this._useEntityIds = true; - } - } else { - // Default: true if all occurrences use entity IDs, false otherwise - // But throw error if there's mixed usage when using defaults - if (hasMixedUsage) { - throw new Error( - `Cannot mix TableOccurrence instances with and without entity IDs in the same database. ` + - `Occurrences with entity IDs: [${occurrencesWithIds.join(", ")}]. ` + - `Occurrences without entity IDs: [${occurrencesWithoutIds.join(", ")}]. ` + - `Either all table occurrences must use entity IDs (fmtId + fmfIds), none should, or explicitly set useEntityIds to false.`, - ); - } - this._useEntityIds = allOccurrencesUseEntityIds; - } - } else { - // No occurrences provided, use explicit config or default to false - this._useEntityIds = config?.useEntityIds ?? 
false; - } - - // Inform the execution context whether to use entity IDs - if (this.context._setUseEntityIds) { - this.context._setUseEntityIds(this._useEntityIds); - } - // Initialize schema manager this.schema = new SchemaManager(this.databaseName, this.context); + this._useEntityIds = config?.useEntityIds ?? false; } - /** - * Returns true if any table occurrence in this database is using entity IDs. - */ - isUsingEntityIds(): boolean { - return this._useEntityIds; - } - - /** - * Gets a table occurrence by name. - * @internal - */ - getOccurrence(name: string): TableOccurrence | undefined { - return this.occurrenceMap.get(name); - } - - from | (string & {})>( - name: Name, - ): Occurrences extends readonly [] - ? EntitySet, undefined> - : Name extends ExtractOccurrenceNames - ? EntitySet< - ExtractSchemaFromOccurrence>, - FindOccurrenceByName - > - : EntitySet, undefined> { - const occurrence = this.occurrenceMap.get(name as string); - - if (occurrence) { - // Use EntitySet.create to preserve types better - type OccType = FindOccurrenceByName; - type SchemaType = ExtractSchemaFromOccurrence; - - return EntitySet.create({ - occurrence: occurrence as OccType, - tableName: name as string, - databaseName: this.databaseName, - context: this.context, - database: this, - }) as any; - } else { - // Return untyped EntitySet for dynamic table access - return new EntitySet, undefined>({ - tableName: name as string, - databaseName: this.databaseName, - context: this.context, - database: this, - }) as any; + from>(table: T): EntitySet { + // Only override database-level useEntityIds if table explicitly sets it + // (not if it's undefined, which would override the database setting) + if ( + Object.prototype.hasOwnProperty.call(table, FMTable.Symbol.UseEntityIds) + ) { + const tableUseEntityIds = (table as any)[FMTable.Symbol.UseEntityIds]; + if (typeof tableUseEntityIds === "boolean") { + this._useEntityIds = tableUseEntityIds; + } } + return new EntitySet({ + occurrence: 
table as T, + databaseName: this.databaseName, + context: this.context, + database: this, + }); } /** diff --git a/packages/fmodata/src/client/delete-builder.ts b/packages/fmodata/src/client/delete-builder.ts index 30f0657f..0df96248 100644 --- a/packages/fmodata/src/client/delete-builder.ts +++ b/packages/fmodata/src/client/delete-builder.ts @@ -4,34 +4,36 @@ import type { Result, WithSystemFields, ExecuteOptions, + ExecuteMethodOptions, } from "../types"; import { getAcceptHeader } from "../types"; -import type { TableOccurrence } from "./table-occurrence"; +import type { FMTable, InferSchemaOutputFromFMTable } from "../orm/table"; +import { + getTableName, + getTableId as getTableIdHelper, + isUsingEntityIds, +} from "../orm/table"; import { QueryBuilder } from "./query-builder"; import { type FFetchOptions } from "@fetchkit/ffetch"; -import { getTableIdentifiers } from "../transform"; import { parseErrorResponse } from "./error-parser"; /** * Initial delete builder returned from EntitySet.delete() * Requires calling .byId() or .where() before .execute() is available */ -export class DeleteBuilder> { - private tableName: string; +export class DeleteBuilder> { private databaseName: string; private context: ExecutionContext; - private occurrence?: TableOccurrence; + private table: Occ; private databaseUseEntityIds: boolean; constructor(config: { - occurrence?: TableOccurrence; - tableName: string; + occurrence: Occ; databaseName: string; context: ExecutionContext; databaseUseEntityIds?: boolean; }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; + this.table = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.databaseUseEntityIds = config.databaseUseEntityIds ?? 
false; @@ -40,10 +42,9 @@ export class DeleteBuilder> { /** * Delete a single record by ID */ - byId(id: string | number): ExecutableDeleteBuilder { - return new ExecutableDeleteBuilder({ - occurrence: this.occurrence, - tableName: this.tableName, + byId(id: string | number): ExecutableDeleteBuilder { + return new ExecutableDeleteBuilder({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, mode: "byId", @@ -57,20 +58,11 @@ export class DeleteBuilder> { * @param fn Callback that receives a QueryBuilder for building the filter */ where( - fn: ( - q: QueryBuilder>, - ) => QueryBuilder>, - ): ExecutableDeleteBuilder { + fn: (q: QueryBuilder) => QueryBuilder, + ): ExecutableDeleteBuilder { // Create a QueryBuilder for the user to configure - const queryBuilder = new QueryBuilder< - WithSystemFields, - keyof WithSystemFields, - false, - false, - undefined - >({ - occurrence: undefined, - tableName: this.tableName, + const queryBuilder = new QueryBuilder({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, }); @@ -78,9 +70,8 @@ export class DeleteBuilder> { // Let the user configure it const configuredBuilder = fn(queryBuilder); - return new ExecutableDeleteBuilder({ - occurrence: this.occurrence, - tableName: this.tableName, + return new ExecutableDeleteBuilder({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, mode: "byFilter", @@ -94,30 +85,27 @@ export class DeleteBuilder> { * Executable delete builder - has execute() method * Returned after calling .byId() or .where() */ -export class ExecutableDeleteBuilder> +export class ExecutableDeleteBuilder> implements ExecutableBuilder<{ deletedCount: number }> { - private tableName: string; private databaseName: string; private context: ExecutionContext; - private occurrence?: TableOccurrence; + private table: Occ; private mode: "byId" | "byFilter"; private recordId?: string | number; - private queryBuilder?: QueryBuilder; + private 
queryBuilder?: QueryBuilder; private databaseUseEntityIds: boolean; constructor(config: { - occurrence?: TableOccurrence; - tableName: string; + occurrence: Occ; databaseName: string; context: ExecutionContext; mode: "byId" | "byFilter"; recordId?: string | number; - queryBuilder?: QueryBuilder; + queryBuilder?: QueryBuilder; databaseUseEntityIds?: boolean; }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; + this.table = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.mode = config.mode; @@ -144,28 +132,23 @@ export class ExecutableDeleteBuilder> * @param useEntityIds - Optional override for entity ID usage */ private getTableId(useEntityIds?: boolean): string { - if (!this.occurrence) { - return this.tableName; - } - const contextDefault = this.context._getUseEntityIds?.() ?? false; const shouldUseIds = useEntityIds ?? contextDefault; if (shouldUseIds) { - const identifiers = getTableIdentifiers(this.occurrence); - if (!identifiers.id) { + if (!isUsingEntityIds(this.table)) { throw new Error( - `useEntityIds is true but TableOccurrence "${identifiers.name}" does not have an fmtId defined`, + `useEntityIds is true but table "${getTableName(this.table)}" does not have entity IDs configured`, ); } - return identifiers.id; + return getTableIdHelper(this.table); } - return this.occurrence.getTableName(); + return getTableName(this.table); } async execute( - options?: RequestInit & FFetchOptions & { useEntityIds?: boolean }, + options?: ExecuteMethodOptions, ): Promise> { // Merge database-level useEntityIds with per-request options const mergedOptions = this.mergeExecuteOptions(options); @@ -187,10 +170,11 @@ export class ExecutableDeleteBuilder> // Get the query string from the configured QueryBuilder const queryString = this.queryBuilder.getQueryString(); // Remove the leading "/" and table name from the query string as we'll build our own URL + const tableName = getTableName(this.table); 
const queryParams = queryString.startsWith(`/${tableId}`) ? queryString.slice(`/${tableId}`.length) - : queryString.startsWith(`/${this.tableName}`) - ? queryString.slice(`/${this.tableName}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) : queryString; url = `/${this.databaseName}/${tableId}${queryParams}`; @@ -237,10 +221,11 @@ export class ExecutableDeleteBuilder> } const queryString = this.queryBuilder.getQueryString(); + const tableName = getTableName(this.table); const queryParams = queryString.startsWith(`/${tableId}`) ? queryString.slice(`/${tableId}`.length) - : queryString.startsWith(`/${this.tableName}`) - ? queryString.slice(`/${this.tableName}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) : queryString; url = `/${this.databaseName}/${tableId}${queryParams}`; @@ -270,9 +255,10 @@ export class ExecutableDeleteBuilder> ): Promise> { // Check for error responses (important for batch operations) if (!response.ok) { + const tableName = getTableName(this.table); const error = await parseErrorResponse( response, - response.url || `/${this.databaseName}/${this.tableName}`, + response.url || `/${this.databaseName}/${tableName}`, ); return { data: undefined, error }; } diff --git a/packages/fmodata/src/client/entity-set.ts b/packages/fmodata/src/client/entity-set.ts index 73e11f9c..fb03d177 100644 --- a/packages/fmodata/src/client/entity-set.ts +++ b/packages/fmodata/src/client/entity-set.ts @@ -1,143 +1,83 @@ -import type { - ExecutionContext, - InferSchemaType, - WithSystemFields, - InsertData, - UpdateData, -} from "../types"; +import type { ExecutionContext } from "../types"; import type { StandardSchemaV1 } from "@standard-schema/spec"; -import type { BaseTable } from "./base-table"; -import type { TableOccurrence } from "./table-occurrence"; -import { QueryBuilder } from "./query-builder"; +import { QueryBuilder } from "./query/index"; import { 
RecordBuilder } from "./record-builder"; import { InsertBuilder } from "./insert-builder"; import { DeleteBuilder } from "./delete-builder"; import { UpdateBuilder } from "./update-builder"; import { Database } from "./database"; +import type { + FMTable, + InferSchemaOutputFromFMTable, + InsertDataFromFMTable, + UpdateDataFromFMTable, + ValidExpandTarget, + ColumnMap, +} from "../orm/table"; +import { + FMTable as FMTableClass, + getDefaultSelect, + getTableName, + getTableColumns, +} from "../orm/table"; +import type { FieldBuilder } from "../orm/field-builders"; +import { createLogger, InternalLogger } from "../logger"; -// Helper type to extract navigation relation names from an occurrence -type ExtractNavigationNames< - O extends TableOccurrence | undefined, -> = - O extends TableOccurrence - ? Nav extends Record - ? keyof Nav & string - : never - : never; - -// Helper type to extract schema from a TableOccurrence -type ExtractSchemaFromOccurrence = - O extends TableOccurrence - ? BT extends BaseTable - ? S - : never - : never; - -// Helper type to extract defaultSelect from a TableOccurrence +// Helper type to extract defaultSelect from an FMTable +// Since TypeScript can't extract Symbol-indexed properties at the type level, +// we simplify to return keyof InferSchemaFromFMTable when O is an FMTable. +// The actual defaultSelect logic is handled at runtime. type ExtractDefaultSelect = - O extends TableOccurrence - ? BT extends BaseTable - ? DefSelect extends "all" - ? keyof S - : DefSelect extends "schema" - ? keyof S - : DefSelect extends readonly (infer K)[] - ? K & keyof S - : keyof S + O extends FMTable ? keyof InferSchemaOutputFromFMTable : never; + +/** + * Helper type to extract properly-typed columns from an FMTable. + * This preserves the specific column types instead of widening to `any`. + */ +type ExtractColumnsFromOcc = + T extends FMTable + ? TFields extends Record> + ? 
ColumnMap : never : never; -// Helper type to find target occurrence by relation name -type FindNavigationTarget< - O extends TableOccurrence | undefined, - Name extends string, -> = - O extends TableOccurrence - ? Nav extends Record - ? Name extends keyof Nav - ? Nav[Name] - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - >; - -// Helper type to get the inferred schema type from a target occurrence -type GetTargetSchemaType< - O extends TableOccurrence | undefined, - Rel extends string, -> = [FindNavigationTarget] extends [ - TableOccurrence, -] - ? [BT] extends [BaseTable] - ? [S] extends [Record] - ? InferSchemaType - : Record - : Record - : Record; - -export class EntitySet< - Schema extends Record = any, - Occ extends TableOccurrence | undefined = undefined, -> { - private occurrence?: Occ; - private tableName: string; +export class EntitySet> { + private occurrence: Occ; private databaseName: string; private context: ExecutionContext; - private database: Database; // Database instance for accessing occurrences + private database: Database; // Database instance for accessing occurrences private isNavigateFromEntitySet?: boolean; private navigateRelation?: string; private navigateSourceTableName?: string; private navigateBasePath?: string; // Full base path for chained navigations + private databaseUseEntityIds: boolean; + private logger: InternalLogger; constructor(config: { - occurrence?: Occ; - tableName: string; + occurrence: Occ; databaseName: string; context: ExecutionContext; database?: any; }) { this.occurrence = config.occurrence; - this.tableName = config.tableName; this.databaseName = config.databaseName; this.context = config.context; this.database = config.database; + // Get useEntityIds from database if available, otherwise default to false + this.databaseUseEntityIds = + 
(config.database as any)?._useEntityIds ?? false; + this.logger = config.context?._getLogger?.() ?? createLogger(); } - // Type-only method to help TypeScript infer the schema from occurrence - static create< - OccurrenceSchema extends Record, - Occ extends - | TableOccurrence< - BaseTable, - any, - any, - any - > - | undefined = undefined, - >(config: { - occurrence?: Occ; - tableName: string; + // Type-only method to help TypeScript infer the schema from table + static create>(config: { + occurrence: Occ; databaseName: string; context: ExecutionContext; - database: Database; - }): EntitySet { - return new EntitySet({ + database: Database; + }): EntitySet { + return new EntitySet({ occurrence: config.occurrence, - tableName: config.tableName, databaseName: config.databaseName, context: config.context, database: config.database, @@ -145,58 +85,80 @@ export class EntitySet< } list(): QueryBuilder< - InferSchemaType, - Occ extends TableOccurrence - ? ExtractDefaultSelect - : keyof InferSchemaType, + Occ, + keyof InferSchemaOutputFromFMTable, false, false, - Occ + {} > { - const builder = new QueryBuilder< - InferSchemaType, - Occ extends TableOccurrence - ? ExtractDefaultSelect - : keyof InferSchemaType, - false, - false, - Occ - >({ + const builder = new QueryBuilder({ occurrence: this.occurrence as Occ, - tableName: this.tableName, databaseName: this.databaseName, context: this.context, - databaseUseEntityIds: this.database?.isUsingEntityIds() ?? 
false, + databaseUseEntityIds: this.databaseUseEntityIds, }); // Apply defaultSelect if occurrence exists and select hasn't been called if (this.occurrence) { - const defaultSelect = this.occurrence.defaultSelect; + // FMTable - access via helper functions + const defaultSelectValue = getDefaultSelect(this.occurrence); + const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; + let schema: Record | undefined; + + if (tableSchema) { + // Extract schema from StandardSchemaV1 + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + schema = zodSchema.shape as Record; + } + } - if (defaultSelect === "schema") { - // Extract field names from schema - const schema = this.occurrence.baseTable.schema; - const fields = Object.keys(schema) as (keyof InferSchemaType)[]; - // Deduplicate fields (same as select method) - const uniqueFields = [...new Set(fields)]; - return builder.select(...uniqueFields).top(1000); - } else if (Array.isArray(defaultSelect)) { - // Use the provided field names, deduplicated - const uniqueFields = [ - ...new Set(defaultSelect), - ] as (keyof InferSchemaType)[]; - return builder.select(...uniqueFields).top(1000); + if (defaultSelectValue === "schema") { + // Use getTableColumns to get all columns and select them + // This is equivalent to select(getTableColumns(occurrence)) + // Cast to the declared return type - runtime behavior handles the actual selection + const allColumns = getTableColumns( + this.occurrence, + ) as ExtractColumnsFromOcc; + return builder.select(allColumns).top(1000) as QueryBuilder< + Occ, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; + } else if (typeof defaultSelectValue === "object") { + // defaultSelectValue is a select object (Record) + // Cast to the declared return type - runtime behavior handles the actual selection + return builder + .select(defaultSelectValue as ExtractColumnsFromOcc) + 
.top(1000) as QueryBuilder< + Occ, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; } // If defaultSelect is "all", no changes needed (current behavior) } // Propagate navigation context if present - if (this.isNavigateFromEntitySet) { - (builder as any).isNavigate = true; - (builder as any).navigateRelation = this.navigateRelation; - (builder as any).navigateSourceTableName = this.navigateSourceTableName; - (builder as any).navigateBasePath = this.navigateBasePath; - // navigateRecordId is intentionally not set (undefined) to indicate navigation from EntitySet + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (builder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + // recordId is intentionally not set (undefined) to indicate navigation from EntitySet + }; } // Apply default pagination limit of 1000 records to prevent stack overflow @@ -207,60 +169,82 @@ export class EntitySet< get( id: string | number, ): RecordBuilder< - InferSchemaType, - false, - keyof InferSchemaType, Occ, - Occ extends TableOccurrence - ? ExtractDefaultSelect - : keyof InferSchemaType, + false, + undefined, + keyof InferSchemaOutputFromFMTable, {} > { - const builder = new RecordBuilder< - InferSchemaType, - false, - keyof InferSchemaType, - Occ, - keyof InferSchemaType, - {} - >({ + const builder = new RecordBuilder({ occurrence: this.occurrence, - tableName: this.tableName, databaseName: this.databaseName, context: this.context, recordId: id, - databaseUseEntityIds: this.database?.isUsingEntityIds() ?? 
false, + databaseUseEntityIds: this.databaseUseEntityIds, }); // Apply defaultSelect if occurrence exists if (this.occurrence) { - const defaultSelect = this.occurrence.defaultSelect; + // FMTable - access via helper functions + const defaultSelectValue = getDefaultSelect(this.occurrence); + const tableSchema = (this.occurrence as any)[FMTableClass.Symbol.Schema]; + let schema: Record | undefined; + + if (tableSchema) { + // Extract schema from StandardSchemaV1 + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + schema = zodSchema.shape as Record; + } + } - if (defaultSelect === "schema") { - // Extract field names from schema - const schema = this.occurrence.baseTable.schema; - const fields = Object.keys(schema) as (keyof InferSchemaType)[]; - // Deduplicate fields (same as select method) - const uniqueFields = [...new Set(fields)]; - const selectedBuilder = builder.select(...uniqueFields); + if (defaultSelectValue === "schema") { + // Use getTableColumns to get all columns and select them + // This is equivalent to select(getTableColumns(occurrence)) + // Use ExtractColumnsFromOcc to preserve the properly-typed column types + const allColumns = getTableColumns( + this.occurrence as any, + ) as ExtractColumnsFromOcc; + const selectedBuilder = builder.select(allColumns); // Propagate navigation context if present - if (this.isNavigateFromEntitySet) { - (selectedBuilder as any).isNavigateFromEntitySet = true; - (selectedBuilder as any).navigateRelation = this.navigateRelation; - (selectedBuilder as any).navigateSourceTableName = this.navigateSourceTableName; + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (selectedBuilder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + }; } return selectedBuilder as any; - } else if 
(Array.isArray(defaultSelect)) { - // Use the provided field names, deduplicated - const uniqueFields = [ - ...new Set(defaultSelect), - ] as (keyof InferSchemaType)[]; - const selectedBuilder = builder.select(...uniqueFields); + } else if ( + typeof defaultSelectValue === "object" && + defaultSelectValue !== null && + !Array.isArray(defaultSelectValue) + ) { + // defaultSelectValue is a select object (Record) + // Use it directly with select() + // Use ExtractColumnsFromOcc to preserve the properly-typed column types + const selectedBuilder = builder.select( + defaultSelectValue as ExtractColumnsFromOcc, + ); // Propagate navigation context if present - if (this.isNavigateFromEntitySet) { - (selectedBuilder as any).isNavigateFromEntitySet = true; - (selectedBuilder as any).navigateRelation = this.navigateRelation; - (selectedBuilder as any).navigateSourceTableName = this.navigateSourceTableName; + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (selectedBuilder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + }; } return selectedBuilder as any; } @@ -268,173 +252,126 @@ export class EntitySet< } // Propagate navigation context if present - if (this.isNavigateFromEntitySet) { - (builder as any).isNavigateFromEntitySet = true; - (builder as any).navigateRelation = this.navigateRelation; - (builder as any).navigateSourceTableName = this.navigateSourceTableName; + if ( + this.isNavigateFromEntitySet && + this.navigateRelation && + this.navigateSourceTableName + ) { + (builder as any).navigation = { + relation: this.navigateRelation, + sourceTableName: this.navigateSourceTableName, + basePath: this.navigateBasePath, + }; } return builder as any; } - // Overload: when returnFullRecord is explicitly false + // Overload: when returnFullRecord is false insert( - data: Occ extends TableOccurrence - ? BT extends BaseTable - ? 
InsertData - : Partial> - : Partial>, + data: InsertDataFromFMTable, options: { returnFullRecord: false }, - ): InsertBuilder, Occ, "minimal">; + ): InsertBuilder; // Overload: when returnFullRecord is true or omitted (default) insert( - data: Occ extends TableOccurrence - ? BT extends BaseTable - ? InsertData - : Partial> - : Partial>, + data: InsertDataFromFMTable, options?: { returnFullRecord?: true }, - ): InsertBuilder, Occ, "representation">; + ): InsertBuilder; // Implementation insert( - data: Occ extends TableOccurrence - ? BT extends BaseTable - ? InsertData - : Partial> - : Partial>, + data: InsertDataFromFMTable, options?: { returnFullRecord?: boolean }, - ): InsertBuilder, Occ, "minimal" | "representation"> { - const returnPref = + ): InsertBuilder { + const returnPreference = options?.returnFullRecord === false ? "minimal" : "representation"; - return new InsertBuilder, Occ, typeof returnPref>({ + + return new InsertBuilder({ occurrence: this.occurrence, - tableName: this.tableName, databaseName: this.databaseName, context: this.context, - data: data as Partial>, - returnPreference: returnPref as any, - databaseUseEntityIds: this.database?.isUsingEntityIds() ?? false, + data: data as any, // Input type is validated/transformed at runtime + returnPreference: returnPreference as any, + databaseUseEntityIds: this.databaseUseEntityIds, }); } // Overload: when returnFullRecord is explicitly true update( - data: Occ extends TableOccurrence - ? BT extends BaseTable - ? UpdateData - : Partial> - : Partial>, + data: UpdateDataFromFMTable, options: { returnFullRecord: true }, - ): UpdateBuilder< - InferSchemaType, - Occ extends TableOccurrence - ? BT extends BaseTable - ? BT - : BaseTable - : BaseTable, - "representation" - >; + ): UpdateBuilder; - // Overload: when returnFullRecord is false or omitted (default returns count) + // Overload: when returnFullRecord is false or omitted (default) update( - data: Occ extends TableOccurrence - ? 
BT extends BaseTable - ? UpdateData - : Partial> - : Partial>, + data: UpdateDataFromFMTable, options?: { returnFullRecord?: false }, - ): UpdateBuilder< - InferSchemaType, - Occ extends TableOccurrence - ? BT extends BaseTable - ? BT - : BaseTable - : BaseTable, - "minimal" - >; + ): UpdateBuilder; // Implementation update( - data: Occ extends TableOccurrence - ? BT extends BaseTable - ? UpdateData - : Partial> - : Partial>, + data: UpdateDataFromFMTable, options?: { returnFullRecord?: boolean }, - ): UpdateBuilder< - InferSchemaType, - Occ extends TableOccurrence - ? BT extends BaseTable - ? BT - : BaseTable - : BaseTable, - "minimal" | "representation" - > { - const returnPref = + ): UpdateBuilder { + const returnPreference = options?.returnFullRecord === true ? "representation" : "minimal"; - return new UpdateBuilder< - InferSchemaType, - Occ extends TableOccurrence - ? BT extends BaseTable - ? BT - : BaseTable - : BaseTable, - typeof returnPref - >({ + + return new UpdateBuilder({ occurrence: this.occurrence, - tableName: this.tableName, databaseName: this.databaseName, context: this.context, - data: data as Partial>, - returnPreference: returnPref as any, - databaseUseEntityIds: this.database?.isUsingEntityIds() ?? false, + data: data as any, // Input type is validated/transformed at runtime + returnPreference: returnPreference as any, + databaseUseEntityIds: this.databaseUseEntityIds, }); } - delete(): DeleteBuilder> { - return new DeleteBuilder>({ + delete(): DeleteBuilder { + return new DeleteBuilder({ occurrence: this.occurrence, - tableName: this.tableName, databaseName: this.databaseName, context: this.context, - databaseUseEntityIds: this.database?.isUsingEntityIds() ?? false, - }); + databaseUseEntityIds: this.databaseUseEntityIds, + }) as any; } - // Overload for valid relation names - returns typed EntitySet - navigate>( - relationName: RelationName, - ): EntitySet< - ExtractSchemaFromOccurrence< - FindNavigationTarget - > extends Record - ? 
ExtractSchemaFromOccurrence> - : Record, - FindNavigationTarget - >; - // Overload for arbitrary strings - returns generic EntitySet - navigate( - relationName: string, - ): EntitySet, undefined>; // Implementation - navigate(relationName: string): EntitySet { - // Use the target occurrence if available, otherwise allow untyped navigation - // (useful when types might be incomplete) - const targetOccurrence = this.occurrence?.navigation[relationName]; - const entitySet = new EntitySet({ - occurrence: targetOccurrence, - tableName: targetOccurrence?.name ?? relationName, + navigate>( + targetTable: ValidExpandTarget, + ): EntitySet ? TargetTable : never> { + // Check if it's an FMTable object or a string + let relationName: string; + + // FMTable object - extract name and validate + relationName = getTableName(targetTable); + + // Runtime validation: Check if relation name is in navigationPaths + if ( + this.occurrence && + FMTableClass.Symbol.NavigationPaths in this.occurrence + ) { + const navigationPaths = (this.occurrence as any)[ + FMTableClass.Symbol.NavigationPaths + ] as readonly string[]; + if (navigationPaths && !navigationPaths.includes(relationName)) { + this.logger.warn( + `Cannot navigate to "${relationName}". Valid navigation paths: ${navigationPaths.length > 0 ? 
navigationPaths.join(", ") : "none"}`, + ); + } + } + + // Create EntitySet with target table + const entitySet = new EntitySet({ + occurrence: targetTable, databaseName: this.databaseName, context: this.context, + database: this.database, }); // Store the navigation info in the EntitySet - // We'll need to pass this through when creating QueryBuilders (entitySet as any).isNavigateFromEntitySet = true; (entitySet as any).navigateRelation = relationName; // Build the full base path for chained navigations - // The base path should contain all segments BEFORE the final relation if (this.isNavigateFromEntitySet && this.navigateBasePath) { // Already have a base path from previous navigation - extend it with current relation (entitySet as any).navigateBasePath = @@ -447,7 +384,9 @@ export class EntitySet< (entitySet as any).navigateSourceTableName = this.navigateSourceTableName; } else { // Initial navigation - source is just the table name - (entitySet as any).navigateSourceTableName = this.tableName; + (entitySet as any).navigateSourceTableName = getTableName( + this.occurrence, + ); } return entitySet; } diff --git a/packages/fmodata/src/client/error-parser.ts b/packages/fmodata/src/client/error-parser.ts index ea3ac9f6..fd31d12e 100644 --- a/packages/fmodata/src/client/error-parser.ts +++ b/packages/fmodata/src/client/error-parser.ts @@ -21,8 +21,10 @@ export async function parseErrorResponse( url: string, ): Promise { // Try to parse error body if it's JSON - let errorBody: { error?: { code?: string | number; message?: string } } | undefined; - + let errorBody: + | { error?: { code?: string | number; message?: string } } + | undefined; + try { if (response.headers.get("content-type")?.includes("application/json")) { errorBody = await safeJsonParse(response); @@ -52,4 +54,3 @@ export async function parseErrorResponse( // Fall back to generic HTTPError return new HTTPError(url, response.status, response.statusText, errorBody); } - diff --git 
a/packages/fmodata/src/client/filemaker-odata.ts b/packages/fmodata/src/client/filemaker-odata.ts index 9ef1c05a..a82233c6 100644 --- a/packages/fmodata/src/client/filemaker-odata.ts +++ b/packages/fmodata/src/client/filemaker-odata.ts @@ -15,20 +15,23 @@ import { ResponseParseError, } from "../errors"; import { Database } from "./database"; -import { TableOccurrence } from "./table-occurrence"; import { safeJsonParse } from "./sanitize-json"; import { get } from "es-toolkit/compat"; +import { createLogger, type Logger, type InternalLogger } from "../logger"; export class FMServerConnection implements ExecutionContext { private fetchClient: ReturnType; private serverUrl: string; private auth: Auth; private useEntityIds: boolean = false; + private logger: InternalLogger; constructor(config: { serverUrl: string; auth: Auth; fetchClientOptions?: FFetchOptions; + logger?: Logger; }) { + this.logger = createLogger(config.logger); this.fetchClient = createClient({ retries: 0, ...config.fetchClientOptions, @@ -68,6 +71,14 @@ export class FMServerConnection implements ExecutionContext { return `${this.serverUrl}${"apiKey" in this.auth ? `/otto` : ""}/fmi/odata/v4`; } + /** + * @internal + * Gets the logger instance + */ + _getLogger(): InternalLogger { + return this.logger; + } + /** * @internal */ @@ -75,6 +86,7 @@ export class FMServerConnection implements ExecutionContext { url: string, options?: RequestInit & FFetchOptions & { useEntityIds?: boolean }, ): Promise> { + const logger = this._getLogger(); const baseUrl = `${this.serverUrl}${"apiKey" in this.auth ? 
`/otto` : ""}/fmi/odata/v4`; const fullUrl = baseUrl + url; @@ -95,6 +107,10 @@ export class FMServerConnection implements ExecutionContext { ...(options?.headers || {}), }; + // Prepare loggableHeaders by omitting the Authorization key + const { Authorization, ...loggableHeaders } = headers; + logger.debug("Request headers:", loggableHeaders); + // TEMPORARY WORKAROUND: Hopefully this feature will be fixed in the ffetch library // Extract fetchHandler and headers separately, only for tests where we're overriding the fetch handler per-request const fetchHandler = options?.fetchHandler; @@ -117,6 +133,7 @@ export class FMServerConnection implements ExecutionContext { }; const resp = await clientToUse(fullUrl, finalOptions); + logger.debug(`${finalOptions.method ?? "GET"} ${resp.status} ${fullUrl}`); // Handle HTTP errors if (!resp.ok) { @@ -254,15 +271,12 @@ export class FMServerConnection implements ExecutionContext { } } - database< - const Occurrences extends readonly TableOccurrence[], - >( + database( name: string, config?: { - occurrences?: Occurrences | undefined; useEntityIds?: boolean; }, - ): Database { + ): Database { return new Database(name, this, config); } diff --git a/packages/fmodata/src/client/insert-builder.ts b/packages/fmodata/src/client/insert-builder.ts index 80f1ccbe..01b74113 100644 --- a/packages/fmodata/src/client/insert-builder.ts +++ b/packages/fmodata/src/client/insert-builder.ts @@ -6,16 +6,24 @@ import type { InferSchemaType, ExecuteOptions, ConditionallyWithODataAnnotations, + ExecuteMethodOptions, } from "../types"; import { getAcceptHeader } from "../types"; -import type { TableOccurrence } from "./table-occurrence"; -import { validateSingleResponse } from "../validation"; +import type { FMTable } from "../orm/table"; +import { + getBaseTableConfig, + getTableName, + getTableId as getTableIdHelper, + isUsingEntityIds, +} from "../orm/table"; +import { + validateSingleResponse, + validateAndTransformInput, +} from "../validation"; 
import { type FFetchOptions } from "@fetchkit/ffetch"; import { transformFieldNamesToIds, - transformTableName, transformResponseFields, - getTableIdentifiers, } from "../transform"; import { InvalidLocationHeaderError } from "../errors"; import { safeJsonParse } from "./sanitize-json"; @@ -25,35 +33,35 @@ export type InsertOptions = { return?: "minimal" | "representation"; }; +import type { InferSchemaOutputFromFMTable } from "../orm/table"; + export class InsertBuilder< - T extends Record, - Occ extends TableOccurrence | undefined = undefined, + Occ extends FMTable | undefined = undefined, ReturnPreference extends "minimal" | "representation" = "representation", > implements ExecutableBuilder< - ReturnPreference extends "minimal" ? { ROWID: number } : T + ReturnPreference extends "minimal" + ? { ROWID: number } + : InferSchemaOutputFromFMTable> > { - private occurrence?: Occ; - private tableName: string; + private table?: Occ; private databaseName: string; private context: ExecutionContext; - private data: Partial; + private data: Partial>>; private returnPreference: ReturnPreference; private databaseUseEntityIds: boolean; constructor(config: { occurrence?: Occ; - tableName: string; databaseName: string; context: ExecutionContext; - data: Partial; + data: Partial>>; returnPreference?: ReturnPreference; databaseUseEntityIds?: boolean; }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; + this.table = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.data = config.data; @@ -75,7 +83,6 @@ export class InsertBuilder< }; } - /** * Parse ROWID from Location header * Expected formats: @@ -116,52 +123,69 @@ export class InsertBuilder< * @param useEntityIds - Optional override for entity ID usage */ private getTableId(useEntityIds?: boolean): string { - if (!this.occurrence) { - return this.tableName; + if (!this.table) { + throw new Error("Table occurrence is required"); } const contextDefault = 
this.context._getUseEntityIds?.() ?? false; const shouldUseIds = useEntityIds ?? contextDefault; if (shouldUseIds) { - const identifiers = getTableIdentifiers(this.occurrence); - if (!identifiers.id) { + if (!isUsingEntityIds(this.table)) { throw new Error( - `useEntityIds is true but TableOccurrence "${identifiers.name}" does not have an fmtId defined` + `useEntityIds is true but table "${getTableName(this.table)}" does not have entity IDs configured`, ); } - return identifiers.id; + return getTableIdHelper(this.table); } - return this.occurrence.getTableName(); + return getTableName(this.table); } async execute( - options?: RequestInit & FFetchOptions & EO, + options?: ExecuteMethodOptions, ): Promise< Result< ReturnPreference extends "minimal" ? { ROWID: number } : ConditionallyWithODataAnnotations< - T, + InferSchemaOutputFromFMTable>, EO["includeODataAnnotations"] extends true ? true : false > > > { // Merge database-level useEntityIds with per-request options const mergedOptions = this.mergeExecuteOptions(options); - + // Get table identifier with override support const tableId = this.getTableId(mergedOptions.useEntityIds); const url = `/${this.databaseName}/${tableId}`; + // Validate and transform input data using input validators (writeValidators) + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + // If validation fails, return error immediately + return { + data: undefined, + error: error instanceof Error ? error : new Error(String(error)), + } as any; + } + } + // Transform field names to FMFIDs if using entity IDs // Only transform if useEntityIds resolves to true (respects per-request override) const shouldUseIds = mergedOptions.useEntityIds ?? false; - - const transformedData = this.occurrence?.baseTable && shouldUseIds - ? 
transformFieldNamesToIds(this.data, this.occurrence.baseTable) - : this.data; + + const transformedData = + this.table && shouldUseIds + ? transformFieldNamesToIds(validatedData, this.table) + : validatedData; // Set Prefer header based on return preference const preferHeader = @@ -205,19 +229,31 @@ export class InsertBuilder< // Transform response field IDs back to names if using entity IDs // Only transform if useEntityIds resolves to true (respects per-request override) - if (this.occurrence?.baseTable && shouldUseIds) { + if (this.table && shouldUseIds) { response = transformResponseFields( response, - this.occurrence.baseTable, + this.table, undefined, // No expand configs for insert ); } - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; + // Get schema from table if available, excluding container fields + let schema: Record | undefined; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + } // Validate the response (FileMaker returns the created record) - const validation = await validateSingleResponse( + const validation = await validateSingleResponse< + InferSchemaOutputFromFMTable> + >( response, schema, undefined, // No selected fields for insert @@ -242,12 +278,14 @@ export class InsertBuilder< getRequestConfig(): { method: string; url: string; body?: any } { // For batch operations, use database-level setting (no per-request override available here) + // Note: Input validation happens in execute() and processResponse() for batch operations const tableId = this.getTableId(this.databaseUseEntityIds); // Transform field names to FMFIDs if using entity IDs - const transformedData = this.occurrence?.baseTable && this.databaseUseEntityIds 
- ? transformFieldNamesToIds(this.data, this.occurrence.baseTable) - : this.data; + const transformedData = + this.table && this.databaseUseEntityIds + ? transformFieldNamesToIds(this.data, this.table) + : this.data; return { method: "POST", @@ -281,13 +319,18 @@ export class InsertBuilder< response: Response, options?: ExecuteOptions, ): Promise< - Result + Result< + ReturnPreference extends "minimal" + ? { ROWID: number } + : InferSchemaOutputFromFMTable> + > > { // Check for error responses (important for batch operations) if (!response.ok) { + const tableName = this.table ? getTableName(this.table) : "unknown"; const error = await parseErrorResponse( response, - response.url || `/${this.databaseName}/${this.tableName}`, + response.url || `/${this.databaseName}/${tableName}`, ); return { data: undefined, error }; } @@ -346,24 +389,53 @@ export class InsertBuilder< }; } + // Validate and transform input data using input validators (writeValidators) + // This is needed for processResponse because it's called from batch operations + // where the data hasn't been validated yet + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + return { + data: undefined, + error: error instanceof Error ? error : new Error(String(error)), + } as any; + } + } + // Transform response field IDs back to names if using entity IDs // Only transform if useEntityIds resolves to true (respects per-request override) const shouldUseIds = options?.useEntityIds ?? 
this.databaseUseEntityIds; - + let transformedResponse = rawResponse; - if (this.occurrence?.baseTable && shouldUseIds) { + if (this.table && shouldUseIds) { transformedResponse = transformResponseFields( rawResponse, - this.occurrence.baseTable, + this.table, undefined, // No expand configs for insert ); } - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; + // Get schema from table if available, excluding container fields + let schema: Record | undefined; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const containerFields = baseTableConfig.containerFields || []; + + // Filter out container fields from schema + schema = { ...baseTableConfig.schema }; + for (const containerField of containerFields) { + delete schema[containerField as string]; + } + } // Validate the response (FileMaker returns the created record) - const validation = await validateSingleResponse( + const validation = await validateSingleResponse< + InferSchemaOutputFromFMTable> + >( transformedResponse, schema, undefined, // No selected fields for insert diff --git a/packages/fmodata/src/client/query-builder.ts b/packages/fmodata/src/client/query-builder.ts index e515a3ef..713db643 100644 --- a/packages/fmodata/src/client/query-builder.ts +++ b/packages/fmodata/src/client/query-builder.ts @@ -1,1535 +1,8 @@ -import { QueryOptions } from "odata-query"; -import buildQuery from "odata-query"; -import type { - ExecutionContext, - ExecutableBuilder, - WithSystemFields, - Result, - InferSchemaType, - ExecuteOptions, - ConditionallyWithODataAnnotations, - ExtractSchemaFromOccurrence, -} from "../types"; -import { getAcceptHeader } from "../types"; -import type { Filter } from "../filter-types"; -import type { TableOccurrence } from "./table-occurrence"; -import type { BaseTable } from "./base-table"; -import { validateListResponse, validateSingleResponse } from "../validation"; -import { RecordCountMismatchError } from 
"../errors"; -import { type FFetchOptions } from "@fetchkit/ffetch"; -import type { StandardSchemaV1 } from "@standard-schema/spec"; -import { - transformFieldNamesArray, - transformFieldName, - transformOrderByField, - transformResponseFields, - getTableIdentifiers, -} from "../transform"; -import { safeJsonParse } from "./sanitize-json"; -import { parseErrorResponse } from "./error-parser"; - -/** - * Default maximum number of records to return in a list query. - * This prevents stack overflow issues with large datasets while still - * allowing substantial data retrieval. Users can override with .top(). - */ -const DEFAULT_TOP = 1000; - -/** - * Type-safe orderBy type that provides better DX than odata-query's default. - * - * Supported forms: - * - `keyof T` - single field name (defaults to ascending) - * - `[keyof T, 'asc' | 'desc']` - single field with explicit direction - * - `Array<[keyof T, 'asc' | 'desc']>` - multiple fields with directions - * - * This type intentionally EXCLUDES `Array` to avoid ambiguity - * between [field1, field2] and [field, direction]. - */ -export type TypeSafeOrderBy = - | (keyof T & string) // Single field name - | [keyof T & string, "asc" | "desc"] // Single field with direction - | Array<[keyof T & string, "asc" | "desc"]>; // Multiple fields with directions - -// Helper type to extract navigation relation names from an occurrence -type ExtractNavigationNames< - O extends TableOccurrence | undefined, -> = - O extends TableOccurrence - ? Nav extends Record - ? keyof Nav & string - : never - : never; - -// Helper type to find target occurrence by relation name -type FindNavigationTarget< - O extends TableOccurrence | undefined, - Name extends string, -> = - O extends TableOccurrence - ? Nav extends Record - ? Name extends keyof Nav - ? 
Nav[Name] - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - >; - -// Helper type to get the inferred schema type from a target occurrence -type GetTargetSchemaType< - O extends TableOccurrence | undefined, - Rel extends string, -> = [FindNavigationTarget] extends [ - TableOccurrence, -] - ? [BT] extends [BaseTable] - ? [S] extends [Record] - ? InferSchemaType - : Record - : Record - : Record; - -// Internal type for expand configuration -type ExpandConfig = { - relation: string; - options?: Partial>; -}; - -// Type to represent expanded relations -export type ExpandedRelations = Record; - -export type QueryReturnType< - T extends Record, - Selected extends keyof T, - SingleMode extends "exact" | "maybe" | false, - IsCount extends boolean, - Expands extends ExpandedRelations, -> = IsCount extends true - ? number - : SingleMode extends "exact" - ? Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - } - : SingleMode extends "maybe" - ? 
- | (Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }) - | null - : (Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - })[]; - -export class QueryBuilder< - T extends Record, - Selected extends keyof T = keyof T, - SingleMode extends "exact" | "maybe" | false = false, - IsCount extends boolean = false, - Occ extends TableOccurrence | undefined = undefined, - Expands extends ExpandedRelations = {}, -> implements - ExecutableBuilder< - QueryReturnType - > -{ - private queryOptions: Partial> = {}; - private expandConfigs: ExpandConfig[] = []; - private singleMode: SingleMode = false as SingleMode; - private isCountMode = false as IsCount; - private occurrence?: Occ; - private tableName: string; - private databaseName: string; - private context: ExecutionContext; - private isNavigate?: boolean; - private navigateRecordId?: string | number; - private navigateRelation?: string; - private navigateSourceTableName?: string; - private navigateBaseRelation?: string; - private navigateBasePath?: string; // Full base path for chained entity set navigations - private databaseUseEntityIds: boolean; - - constructor(config: { - occurrence?: Occ; - tableName: string; - databaseName: string; - context: ExecutionContext; - databaseUseEntityIds?: boolean; - }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; - this.databaseName = config.databaseName; - this.context = config.context; - this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; - } - - /** - * Helper to merge database-level useEntityIds with per-request options - */ - private mergeExecuteOptions( - options?: RequestInit & FFetchOptions & ExecuteOptions, - ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { - // If useEntityIds is not set in options, use the database-level setting - return { - ...options, - useEntityIds: - options?.useEntityIds === undefined - ? 
this.databaseUseEntityIds - : options.useEntityIds, - }; - } - - /** - * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name - * @param useEntityIds - Optional override for entity ID usage - */ - private getTableId(useEntityIds?: boolean): string { - if (!this.occurrence) { - return this.tableName; - } - - const contextDefault = this.context._getUseEntityIds?.() ?? false; - const shouldUseIds = useEntityIds ?? contextDefault; - - if (shouldUseIds) { - const identifiers = getTableIdentifiers(this.occurrence); - if (!identifiers.id) { - throw new Error( - `useEntityIds is true but TableOccurrence "${identifiers.name}" does not have an fmtId defined`, - ); - } - return identifiers.id; - } - - return this.occurrence.getTableName(); - } - - select( - ...fields: K[] - ): QueryBuilder { - const uniqueFields = [...new Set(fields)]; - const newBuilder = new QueryBuilder< - T, - K, - SingleMode, - IsCount, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { - ...this.queryOptions, - select: uniqueFields as string[], - }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = this.singleMode; - newBuilder.isCountMode = this.isCountMode; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - newBuilder.navigateRecordId = this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - /** - * Transforms our filter format to odata-query's expected format - * - Arrays of operators are converted to AND conditions - * - Single operator objects pass through as-is - * - Shorthand values are handled by odata-query - */ - private transformFilter( - filter: 
Filter>, - ): QueryOptions["filter"] { - if (typeof filter === "string") { - // Raw string filters pass through - return filter; - } - - if (Array.isArray(filter)) { - // Array of filters - odata-query handles this as implicit AND - return filter.map((f) => this.transformFilter(f as any)) as any; - } - - // Check if it's a logical filter (and/or/not) - if ("and" in filter || "or" in filter || "not" in filter) { - const result: any = {}; - if ("and" in filter && Array.isArray(filter.and)) { - result.and = filter.and.map((f: any) => this.transformFilter(f)); - } - if ("or" in filter && Array.isArray(filter.or)) { - result.or = filter.or.map((f: any) => this.transformFilter(f)); - } - if ("not" in filter && filter.not) { - result.not = this.transformFilter(filter.not as any); - } - return result; - } - - // Transform field filters - const result: any = {}; - const andConditions: any[] = []; - - for (const [field, value] of Object.entries(filter)) { - // Transform field name to FMFID if using entity IDs AND the feature is enabled - const shouldTransform = - this.occurrence?.baseTable && this.databaseUseEntityIds; - const fieldId = shouldTransform - ? transformFieldName(field, this.occurrence!.baseTable) - : field; - - if (Array.isArray(value)) { - // Array of operators - convert to AND conditions - if (value.length === 1) { - // Single operator in array - unwrap it - result[fieldId] = value[0]; - } else { - // Multiple operators - combine with AND - // Create separate conditions for each operator - for (const op of value) { - andConditions.push({ [fieldId]: op }); - } - } - } else if ( - value && - typeof value === "object" && - !(value instanceof Date) && - !Array.isArray(value) - ) { - // Check if it's an operator object (has operator keys like eq, gt, etc.) 
- const operatorKeys = [ - "eq", - "ne", - "gt", - "ge", - "lt", - "le", - "contains", - "startswith", - "endswith", - "in", - ]; - const isOperatorObject = operatorKeys.some((key) => key in value); - - if (isOperatorObject) { - // Single operator object - pass through - result[fieldId] = value; - } else { - // Regular object - might be nested filter, pass through - result[fieldId] = value; - } - } else { - // Primitive value (shorthand) - pass through - result[fieldId] = value; - } - } - - // If we have AND conditions from arrays, combine them - if (andConditions.length > 0) { - if (Object.keys(result).length > 0) { - // We have both regular fields and array-derived AND conditions - // Combine everything with AND - return { and: [...andConditions, result] }; - } else { - // Only array-derived AND conditions - return { and: andConditions }; - } - } - - return result; - } - - filter( - filter: Filter>, - ): QueryBuilder { - // Transform our filter format to odata-query's expected format - this.queryOptions.filter = this.transformFilter(filter) as any; - return this; - } - - /** - * Specify the sort order for query results. 
- * - * @example Single field (ascending by default) - * ```ts - * .orderBy("name") - * ``` - * - * @example Single field with explicit direction - * ```ts - * .orderBy(["name", "desc"]) - * ``` - * - * @example Multiple fields with directions - * ```ts - * .orderBy([["name", "asc"], ["createdAt", "desc"]]) - * ``` - */ - orderBy( - orderBy: TypeSafeOrderBy, - ): QueryBuilder { - // Transform field names to FMFIDs if using entity IDs - if (this.occurrence?.baseTable && orderBy) { - if (Array.isArray(orderBy)) { - // Check if it's a single tuple [field, direction] or array of tuples - if ( - orderBy.length === 2 && - typeof orderBy[0] === "string" && - (orderBy[1] === "asc" || orderBy[1] === "desc") - ) { - // Single tuple: [field, direction] - const [field, direction] = orderBy as [string, "asc" | "desc"]; - this.queryOptions.orderBy = `${transformOrderByField(field, this.occurrence.baseTable)} ${direction}`; - } else { - // Array of tuples: [[field, dir], [field, dir], ...] - this.queryOptions.orderBy = ( - orderBy as Array<[string, "asc" | "desc"]> - ).map(([field, direction]) => { - const transformedField = transformOrderByField( - field, - this.occurrence!.baseTable, - ); - return `${transformedField} ${direction}`; - }); - } - } else { - // Single field name (string) - this.queryOptions.orderBy = transformOrderByField( - String(orderBy), - this.occurrence.baseTable, - ); - } - } else { - // No occurrence/baseTable - pass through as-is - if (Array.isArray(orderBy)) { - if ( - orderBy.length === 2 && - typeof orderBy[0] === "string" && - (orderBy[1] === "asc" || orderBy[1] === "desc") - ) { - // Single tuple: [field, direction] - const [field, direction] = orderBy as [string, "asc" | "desc"]; - this.queryOptions.orderBy = `${field} ${direction}`; - } else { - // Array of tuples - this.queryOptions.orderBy = ( - orderBy as Array<[string, "asc" | "desc"]> - ).map(([field, direction]) => `${field} ${direction}`); - } - } else { - this.queryOptions.orderBy = 
orderBy; - } - } - return this; - } - - top( - count: number, - ): QueryBuilder { - this.queryOptions.top = count; - return this; - } - - skip( - count: number, - ): QueryBuilder { - this.queryOptions.skip = count; - return this; - } - - /** - * Formats select fields for use in query strings. - * - Transforms field names to FMFIDs if using entity IDs - * - Wraps "id" fields in double quotes - * - URL-encodes special characters but preserves spaces - */ - private formatSelectFields( - select: QueryOptions["select"], - baseTable?: BaseTable, - useEntityIds?: boolean, - ): string { - if (!select) return ""; - const selectFieldsArray = Array.isArray(select) ? select : [select]; - - // Transform to field IDs if using entity IDs AND the feature is enabled - const shouldTransform = - baseTable && (useEntityIds ?? this.databaseUseEntityIds); - const transformedFields = shouldTransform - ? transformFieldNamesArray( - selectFieldsArray.map((f) => String(f)), - baseTable, - ) - : selectFieldsArray.map((f) => String(f)); - - return transformedFields - .map((field) => { - if (field === "id") return `"id"`; - const encodedField = encodeURIComponent(String(field)); - return encodedField.replace(/%20/g, " "); - }) - .join(","); - } - - /** - * Builds expand validation configs from internal expand configurations. - * These are used to validate expanded navigation properties. - */ - private buildExpandValidationConfigs( - configs: ExpandConfig[], - ): import("../validation").ExpandValidationConfig[] { - return configs.map((config) => { - // Look up target occurrence from navigation - const targetOccurrence = this.occurrence?.navigation[config.relation]; - const targetSchema = targetOccurrence?.baseTable?.schema; - - // Extract selected fields from options - const selectedFields = config.options?.select - ? Array.isArray(config.options.select) - ? 
config.options.select.map((f) => String(f)) - : [String(config.options.select)] - : undefined; - - return { - relation: config.relation, - targetSchema: targetSchema, - targetOccurrence: targetOccurrence, - targetBaseTable: targetOccurrence?.baseTable, - occurrence: targetOccurrence, // Add occurrence for transformation - selectedFields: selectedFields, - nestedExpands: undefined, // TODO: Handle nested expands if needed - }; - }); - } - - /** - * Builds OData expand query string from expand configurations. - * Handles nested expands recursively. - * Transforms relation names to FMTIDs if using entity IDs. - */ - private buildExpandString(configs: ExpandConfig[]): string { - if (configs.length === 0) { - return ""; - } - - return configs - .map((config) => { - // Get target occurrence for this relation - const targetOccurrence = this.occurrence?.navigation[config.relation]; - - // When using entity IDs, use the target table's FMTID in the expand parameter - // FileMaker expects FMTID in $expand when Prefer header is set - // Only use FMTID if databaseUseEntityIds is enabled - const shouldUseTableId = - this.databaseUseEntityIds && targetOccurrence?.isUsingTableId(); - const relationName = shouldUseTableId - ? 
targetOccurrence!.getTableId() - : config.relation; - - if (!config.options || Object.keys(config.options).length === 0) { - // Simple expand without options - return relationName; - } - - // Build query options for this expand - const parts: string[] = []; - - if (config.options.select) { - // Pass target base table for field transformation - const selectFields = this.formatSelectFields( - config.options.select, - targetOccurrence?.baseTable, - ); - parts.push(`$select=${selectFields}`); - } - - if (config.options.filter) { - // Filter should already be transformed by the nested builder - // Use odata-query to build filter string - const filterQuery = buildQuery({ filter: config.options.filter }); - const filterMatch = filterQuery.match(/\$filter=([^&]+)/); - if (filterMatch) { - parts.push(`$filter=${filterMatch[1]}`); - } - } - - if (config.options.orderBy) { - // OrderBy should already be transformed by the nested builder - const orderByValue = Array.isArray(config.options.orderBy) - ? 
config.options.orderBy.join(",") - : config.options.orderBy; - parts.push(`$orderby=${String(orderByValue)}`); - } - - if (config.options.top !== undefined) { - parts.push(`$top=${config.options.top}`); - } - - if (config.options.skip !== undefined) { - parts.push(`$skip=${config.options.skip}`); - } - - // Handle nested expands (from expand configs) - if (config.options.expand) { - // If expand is a string, it's already been built - if (typeof config.options.expand === "string") { - parts.push(`$expand=${config.options.expand}`); - } - } - - if (parts.length === 0) { - return relationName; - } - - return `${relationName}(${parts.join(";")})`; - }) - .join(","); - } - - expand< - Rel extends ExtractNavigationNames | (string & {}), - TargetOcc extends FindNavigationTarget = FindNavigationTarget< - Occ, - Rel - >, - TargetSchema extends GetTargetSchemaType = GetTargetSchemaType< - Occ, - Rel - >, - TargetSelected extends keyof TargetSchema = keyof TargetSchema, - >( - relation: Rel, - callback?: ( - builder: QueryBuilder< - TargetSchema, - keyof TargetSchema, - false, - false, - TargetOcc extends TableOccurrence - ? 
TargetOcc - : undefined - >, - ) => QueryBuilder< - WithSystemFields, - TargetSelected, - any, - any, - any - >, - ): QueryBuilder< - T, - Selected, - SingleMode, - IsCount, - Occ, - Expands & { - [K in Rel]: { schema: TargetSchema; selected: TargetSelected }; - } - > { - // Look up target occurrence from navigation - const targetOccurrence = this.occurrence?.navigation[relation as string]; - - // Helper function to get defaultSelect fields from target occurrence - const getDefaultSelectFields = (): string[] | undefined => { - if (!targetOccurrence) return undefined; - const defaultSelect = targetOccurrence.defaultSelect; - if (defaultSelect === "schema") { - const schema = targetOccurrence.baseTable?.schema; - if (schema) { - return [...new Set(Object.keys(schema))]; - } - } else if (Array.isArray(defaultSelect)) { - return [...new Set(defaultSelect)]; - } - // If "all", return undefined (no select restriction) - return undefined; - }; - - if (callback) { - // Create a new QueryBuilder for the target occurrence - const targetBuilder = new QueryBuilder({ - occurrence: targetOccurrence, - tableName: targetOccurrence?.name ?? (relation as string), - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - - // Cast to the expected type for the callback - // At runtime, the builder is untyped (any), but at compile-time we enforce proper types - const typedBuilder = targetBuilder as QueryBuilder< - TargetSchema, - keyof TargetSchema, - false, - false, - TargetOcc extends TableOccurrence - ? 
TargetOcc - : undefined - >; - - // Pass to callback and get configured builder - const configuredBuilder = callback(typedBuilder); - - // Extract the builder's query options - const expandOptions: Partial> = { - ...configuredBuilder.queryOptions, - }; - - // If callback didn't provide select, apply defaultSelect from target occurrence - if (!expandOptions.select) { - const defaultFields = getDefaultSelectFields(); - if (defaultFields) { - expandOptions.select = defaultFields; - } - } - - // If the configured builder has nested expands, we need to include them - if (configuredBuilder.expandConfigs.length > 0) { - // Build nested expand string from the configured builder's expand configs - const nestedExpandString = this.buildExpandString( - configuredBuilder.expandConfigs, - ); - if (nestedExpandString) { - // Add nested expand to options - expandOptions.expand = nestedExpandString as any; - } - } - - const expandConfig: ExpandConfig = { - relation: relation as string, - options: expandOptions, - }; - - this.expandConfigs.push(expandConfig); - } else { - // Simple expand without callback - apply defaultSelect if available - const defaultFields = getDefaultSelectFields(); - if (defaultFields) { - this.expandConfigs.push({ - relation: relation as string, - options: { select: defaultFields }, - }); - } else { - this.expandConfigs.push({ relation: relation as string }); - } - } - - return this as any; - } - - single(): QueryBuilder { - const newBuilder = new QueryBuilder< - T, - Selected, - "exact", - IsCount, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { ...this.queryOptions }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = "exact"; - newBuilder.isCountMode = this.isCountMode; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - 
newBuilder.navigateRecordId = this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - maybeSingle(): QueryBuilder { - const newBuilder = new QueryBuilder< - T, - Selected, - "maybe", - IsCount, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { ...this.queryOptions }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = "maybe"; - newBuilder.isCountMode = this.isCountMode; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - newBuilder.navigateRecordId = this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - count(): QueryBuilder { - const newBuilder = new QueryBuilder< - T, - Selected, - SingleMode, - true, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { ...this.queryOptions, count: true }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = this.singleMode; - newBuilder.isCountMode = true as true; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - newBuilder.navigateRecordId = this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - async execute( - options?: 
RequestInit & FFetchOptions & EO, - ): Promise< - Result< - IsCount extends true - ? number - : SingleMode extends "exact" - ? ConditionallyWithODataAnnotations< - Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }, - EO["includeODataAnnotations"] extends true ? true : false - > - : SingleMode extends "maybe" - ? ConditionallyWithODataAnnotations< - Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }, - EO["includeODataAnnotations"] extends true ? true : false - > | null - : ConditionallyWithODataAnnotations< - Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }, - EO["includeODataAnnotations"] extends true ? true : false - >[] - > - > { - // Build query without expand (we'll add it manually) - const queryOptionsWithoutExpand = { ...this.queryOptions }; - delete queryOptionsWithoutExpand.expand; - - const mergedOptions = this.mergeExecuteOptions(options); - - // Format select fields before building query - if (queryOptionsWithoutExpand.select) { - queryOptionsWithoutExpand.select = this.formatSelectFields( - queryOptionsWithoutExpand.select, - this.occurrence?.baseTable, - ) as any; - } - - let queryString = buildQuery(queryOptionsWithoutExpand); - - // Build custom expand string - const expandString = this.buildExpandString(this.expandConfigs); - if (expandString) { - const separator = queryString.includes("?") ? 
"&" : "?"; - queryString = `${queryString}${separator}$expand=${expandString}`; - } - - // Handle navigation from RecordBuilder - if ( - this.isNavigate && - this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - let url: string; - if (this.navigateBaseRelation) { - // Navigation from a navigated EntitySet: /sourceTable/baseRelation('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateBaseRelation}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } else { - // Normal navigation: /sourceTable('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } - const result = await this.context._makeRequest(url, mergedOptions); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - let response = result.data; - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = mergedOptions.useEntityIds ?? false; - - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - response = transformResponseFields( - response, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = response as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? 
"one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - return { data: record as any, error: undefined }; - } else { - const records = resp.value ?? []; - const stripped = records.map((record: any) => record); - return { data: stripped as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as - | (keyof T)[] - | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - const validation = await validateSingleResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - return { data: validation.data as any, error: undefined }; - } else { - const validation = await validateListResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - return { data: validation.data as any, error: undefined }; - } - } - - // Handle navigation from EntitySet (without record ID) - if ( - this.isNavigate && - !this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - const result = await this.context._makeRequest( - `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateRelation}${queryString}`, - mergedOptions, - ); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - let response = result.data; - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds 
resolves to true (respects per-request override) - const shouldUseIds = mergedOptions.useEntityIds ?? false; - - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - response = transformResponseFields( - response, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = response as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? "one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - return { data: record as any, error: undefined }; - } else { - const records = resp.value ?? 
[]; - const stripped = records.map((record: any) => record); - return { data: stripped as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as - | (keyof T)[] - | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - const validation = await validateSingleResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - return { data: validation.data as any, error: undefined }; - } else { - const validation = await validateListResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - return { data: validation.data as any, error: undefined }; - } - } - - // Handle $count endpoint - if (this.isCountMode) { - const tableId = this.getTableId(mergedOptions.useEntityIds); - const result = await this.context._makeRequest( - `/${this.databaseName}/${tableId}/$count${queryString}`, - mergedOptions, - ); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - // OData returns count as a string, convert to number - const count = - typeof result.data === "string" ? 
Number(result.data) : result.data; - return { data: count as number, error: undefined } as any; - } - - const tableId = this.getTableId(mergedOptions.useEntityIds); - const result = await this.context._makeRequest( - `/${this.databaseName}/${tableId}${queryString}`, - mergedOptions, - ); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - let response = result.data; - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = mergedOptions.useEntityIds ?? false; - - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - response = transformResponseFields( - response, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = response as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? "one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - return { data: record as any, error: undefined }; - } else { - // Handle list response structure - const records = resp.value ?? 
[]; - return { data: records as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as (keyof T)[] | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - const validation = await validateSingleResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - return { - data: validation.data as any, - error: undefined, - }; - } else { - const validation = await validateListResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - return { - data: validation.data as any, - error: undefined, - }; - } - } - - getQueryString(): string { - // Build query without expand (we'll add it manually) - const queryOptionsWithoutExpand = { ...this.queryOptions }; - delete queryOptionsWithoutExpand.expand; - - // Format select fields before building query - buildQuery treats & as separator, - // so we need to pre-encode special characters. buildQuery preserves encoded values. 
- if (queryOptionsWithoutExpand.select) { - queryOptionsWithoutExpand.select = this.formatSelectFields( - queryOptionsWithoutExpand.select, - this.occurrence?.baseTable, - ) as any; - } - - let queryParams = buildQuery(queryOptionsWithoutExpand); - - // Post-process: buildQuery encodes spaces as %20, but we want to preserve spaces - // Replace %20 with spaces in the $select part - if (this.queryOptions.select) { - queryParams = queryParams.replace( - /\$select=([^&]*)/, - (match, selectValue) => { - return `$select=${selectValue.replace(/%20/g, " ")}`; - }, - ); - } - const expandString = this.buildExpandString(this.expandConfigs); - if (expandString) { - const separator = queryParams.includes("?") ? "&" : "?"; - queryParams = `${queryParams}${separator}$expand=${expandString}`; - } - - // Handle navigation from RecordBuilder (with record ID) - if ( - this.isNavigate && - this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - let path: string; - if (this.navigateBaseRelation) { - // Navigation from a navigated EntitySet: /sourceTable/baseRelation('recordId')/relation - path = `/${this.navigateSourceTableName}/${this.navigateBaseRelation}('${this.navigateRecordId}')/${this.navigateRelation}`; - } else { - // Normal navigation: /sourceTableName('recordId')/relationName - path = `/${this.navigateSourceTableName}('${this.navigateRecordId}')/${this.navigateRelation}`; - } - // Append query params if any exist - return queryParams ? 
`${path}${queryParams}` : path; - } - - // Handle navigation from EntitySet (without record ID) - if ( - this.isNavigate && - !this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - let path: string; - if (this.navigateBasePath) { - // Chained navigation: /basePath/relationName (basePath already includes intermediate segments) - path = `/${this.navigateBasePath}/${this.navigateRelation}`; - } else { - // Single navigation: /sourceTableName/relationName - path = `/${this.navigateSourceTableName}/${this.navigateRelation}`; - } - // Append query params if any exist - return queryParams ? `${path}${queryParams}` : path; - } - - // Default case: return table ID (respects entity ID settings) with query params - const tableId = this.getTableId(this.databaseUseEntityIds); - return `/${tableId}${queryParams}`; - } - - getRequestConfig(): { method: string; url: string; body?: any } { - // Build query without expand (we'll add it manually) - const queryOptionsWithoutExpand = { ...this.queryOptions }; - delete queryOptionsWithoutExpand.expand; - - // Format select fields before building query - if (queryOptionsWithoutExpand.select) { - queryOptionsWithoutExpand.select = this.formatSelectFields( - queryOptionsWithoutExpand.select, - this.occurrence?.baseTable, - ) as any; - } - - let queryString = buildQuery(queryOptionsWithoutExpand); - - // Build custom expand string - const expandString = this.buildExpandString(this.expandConfigs); - if (expandString) { - const separator = queryString.includes("?") ? 
"&" : "?"; - queryString = `${queryString}${separator}$expand=${expandString}`; - } - - let url: string; - - // Handle navigation from RecordBuilder (with record ID) - if ( - this.isNavigate && - this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - if (this.navigateBaseRelation) { - // Navigation from a navigated EntitySet: /sourceTable/baseRelation('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateBaseRelation}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } else { - // Normal navigation: /sourceTable('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } - } else if ( - this.isNavigate && - !this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - // Handle navigation from EntitySet (without record ID) - if (this.navigateBasePath) { - // Chained navigation: /basePath/relationName (basePath already includes intermediate segments) - url = `/${this.databaseName}/${this.navigateBasePath}/${this.navigateRelation}${queryString}`; - } else { - // Single navigation: /sourceTableName/relationName - url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateRelation}${queryString}`; - } - } else if (this.isCountMode) { - // Use getTableId to respect entity ID settings (for batch operations) - const tableId = this.getTableId(this.databaseUseEntityIds); - url = `/${this.databaseName}/${tableId}/$count${queryString}`; - } else { - // Use getTableId to respect entity ID settings (for batch operations) - const tableId = this.getTableId(this.databaseUseEntityIds); - url = `/${this.databaseName}/${tableId}${queryString}`; - } - - return { - method: "GET", - url, - }; - } - - toRequest(baseUrl: string, options?: ExecuteOptions): Request { - const config = this.getRequestConfig(); - const fullUrl = 
`${baseUrl}${config.url}`; - - return new Request(fullUrl, { - method: config.method, - headers: { - "Content-Type": "application/json", - Accept: getAcceptHeader(options?.includeODataAnnotations), - }, - }); - } - - async processResponse( - response: Response, - options?: ExecuteOptions, - ): Promise< - Result> - > { - // Check for error responses (important for batch operations) - if (!response.ok) { - const error = await parseErrorResponse( - response, - response.url || `/${this.databaseName}/${this.tableName}`, - ); - return { data: undefined, error }; - } - - // Handle 204 No Content (shouldn't happen for queries, but handle it gracefully) - if (response.status === 204) { - // Return empty list for list queries, null for single queries - if (this.singleMode !== false) { - if (this.singleMode === "maybe") { - return { data: null as any, error: undefined }; - } - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: [] as any, error: undefined }; - } - - // Parse the response body (using safeJsonParse to handle FileMaker's invalid JSON with unquoted ? values) - let rawData; - try { - rawData = await safeJsonParse(response); - } catch (err) { - // Check if it's an empty body error (common with 204 responses) - if (err instanceof SyntaxError && response.status === 204) { - // Handled above, but just in case - return { data: [] as any, error: undefined }; - } - return { - data: undefined, - error: { - name: "ResponseParseError", - message: `Failed to parse response JSON: ${err instanceof Error ? 
err.message : "Unknown error"}`, - timestamp: new Date(), - } as any, - }; - } - - if (!rawData) { - return { - data: undefined, - error: { - name: "ResponseError", - message: "Response body was empty or null", - timestamp: new Date(), - } as any, - }; - } - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = options?.useEntityIds ?? this.databaseUseEntityIds; - - let transformedData = rawData; - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - transformedData = transformResponseFields( - rawData, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = transformedData as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? "one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - return { data: record as any, error: undefined }; - } else { - // Handle list response structure - const records = resp.value ?? 
[]; - return { data: records as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as (keyof T)[] | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - // Single mode (one() or oneOrNull()) - const validation = await validateSingleResponse( - transformedData, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - - if (validation.data === null) { - return { data: null as any, error: undefined }; - } - - return { data: validation.data as any, error: undefined }; - } - - // List mode - const validation = await validateListResponse( - transformedData, - schema, - selectedFields, - expandValidationConfigs, - ); - - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - - return { data: validation.data as any, error: undefined }; - } -} +// Re-export QueryBuilder and types from the new modular location +// This maintains backward compatibility for existing imports +export { + QueryBuilder, + type TypeSafeOrderBy, + type ExpandedRelations, + type QueryReturnType, +} from "./query/index"; diff --git a/packages/fmodata/src/client/query-builder.ts.bak b/packages/fmodata/src/client/query-builder.ts.bak deleted file mode 100644 index c9a27405..00000000 --- a/packages/fmodata/src/client/query-builder.ts.bak +++ /dev/null @@ -1,1457 +0,0 @@ -import { QueryOptions } from "odata-query"; -import buildQuery from "odata-query"; -import type { - ExecutionContext, - ExecutableBuilder, - WithSystemFields, - Result, - InferSchemaType, - ExecuteOptions, - ConditionallyWithODataAnnotations, - ExtractSchemaFromOccurrence, -} from "../types"; -import type { Filter } from "../filter-types"; -import type { TableOccurrence } from 
"./table-occurrence"; -import type { BaseTable } from "./base-table"; -import { validateListResponse, validateSingleResponse } from "../validation"; -import { RecordCountMismatchError } from "../errors"; -import { type FFetchOptions } from "@fetchkit/ffetch"; -import type { StandardSchemaV1 } from "@standard-schema/spec"; -import { - transformFieldNamesArray, - transformFieldName, - transformOrderByField, - transformResponseFields, - getTableIdentifiers, -} from "../transform"; - - -/** - * Default maximum number of records to return in a list query. - * This prevents stack overflow issues with large datasets while still - * allowing substantial data retrieval. Users can override with .top(). - */ -const DEFAULT_TOP = 1000; - -// Helper type to extract navigation relation names from an occurrence -type ExtractNavigationNames< - O extends TableOccurrence | undefined, -> = - O extends TableOccurrence - ? Nav extends Record - ? keyof Nav & string - : never - : never; - -// Helper type to resolve a navigation item (handles both direct and lazy-loaded) -type ResolveNavigationItem = T extends () => infer R ? R : T; - -// Helper type to find target occurrence by relation name -type FindNavigationTarget< - O extends TableOccurrence | undefined, - Name extends string, -> = - O extends TableOccurrence - ? Nav extends Record - ? Name extends keyof Nav - ? ResolveNavigationItem - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - >; - -// Helper type to get the inferred schema type from a target occurrence -type GetTargetSchemaType< - O extends TableOccurrence | undefined, - Rel extends string, -> = [FindNavigationTarget] extends [ - TableOccurrence, -] - ? [BT] extends [BaseTable] - ? [S] extends [Record] - ? 
InferSchemaType - : Record - : Record - : Record; - -// Internal type for expand configuration -type ExpandConfig = { - relation: string; - options?: Partial>; -}; - -// Type to represent expanded relations -export type ExpandedRelations = Record; - -export type QueryReturnType< - T extends Record, - Selected extends keyof T, - SingleMode extends "exact" | "maybe" | false, - IsCount extends boolean, - Expands extends ExpandedRelations, -> = IsCount extends true - ? number - : SingleMode extends "exact" - ? Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - } - : SingleMode extends "maybe" - ? - | (Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }) - | null - : (Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - })[]; - -export class QueryBuilder< - T extends Record, - Selected extends keyof T = keyof T, - SingleMode extends "exact" | "maybe" | false = false, - IsCount extends boolean = false, - Occ extends TableOccurrence | undefined = undefined, - Expands extends ExpandedRelations = {}, -> implements - ExecutableBuilder< - QueryReturnType - > -{ - private queryOptions: Partial> = {}; - private expandConfigs: ExpandConfig[] = []; - private singleMode: SingleMode = false as SingleMode; - private isCountMode = false as IsCount; - private occurrence?: Occ; - private tableName: string; - private databaseName: string; - private context: ExecutionContext; - private isNavigate?: boolean; - private navigateRecordId?: string | number; - private navigateRelation?: string; - private navigateSourceTableName?: string; - private navigateBaseRelation?: string; - private databaseUseEntityIds: boolean; - - constructor(config: { - occurrence?: Occ; - tableName: string; - databaseName: string; - context: ExecutionContext; - databaseUseEntityIds?: boolean; - }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; - 
this.databaseName = config.databaseName; - this.context = config.context; - this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; - } - - /** - * Helper to merge database-level useEntityIds with per-request options - */ - private mergeExecuteOptions( - options?: RequestInit & FFetchOptions & ExecuteOptions, - ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { - // If useEntityIds is not set in options, use the database-level setting - return { - ...options, - useEntityIds: - options?.useEntityIds === undefined - ? this.databaseUseEntityIds - : options.useEntityIds, - }; - } - - /** - * Helper to conditionally strip OData annotations based on options - */ - private stripODataAnnotationsIfNeeded>( - data: T, - options?: ExecuteOptions, - ): T { - // Only include annotations if explicitly requested - if (options?.includeODataAnnotations === true) { - return data; - } - - // Strip OData annotations - const { "@id": _id, "@editLink": _editLink, ...rest } = data; - return rest as T; - } - - /** - * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name - * @param useEntityIds - Optional override for entity ID usage - */ - private getTableId(useEntityIds?: boolean): string { - if (!this.occurrence) { - return this.tableName; - } - - const contextDefault = this.context._getUseEntityIds?.() ?? false; - const shouldUseIds = useEntityIds ?? 
contextDefault; - - if (shouldUseIds) { - const identifiers = getTableIdentifiers(this.occurrence); - if (!identifiers.id) { - throw new Error( - `useEntityIds is true but TableOccurrence "${identifiers.name}" does not have an fmtId defined`, - ); - } - return identifiers.id; - } - - return this.occurrence.getTableName(); - } - - select( - ...fields: K[] - ): QueryBuilder { - const uniqueFields = [...new Set(fields)]; - const newBuilder = new QueryBuilder< - T, - K, - SingleMode, - IsCount, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { - ...this.queryOptions, - select: uniqueFields as string[], - }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = this.singleMode; - newBuilder.isCountMode = this.isCountMode; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - newBuilder.navigateRecordId = this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - /** - * Transforms our filter format to odata-query's expected format - * - Arrays of operators are converted to AND conditions - * - Single operator objects pass through as-is - * - Shorthand values are handled by odata-query - */ - private transformFilter( - filter: Filter>, - ): QueryOptions["filter"] { - if (typeof filter === "string") { - // Raw string filters pass through - return filter; - } - - if (Array.isArray(filter)) { - // Array of filters - odata-query handles this as implicit AND - return filter.map((f) => this.transformFilter(f as any)) as any; - } - - // Check if it's a logical filter (and/or/not) - if ("and" in filter || "or" in filter || "not" in filter) { - const result: any = {}; - if 
("and" in filter && Array.isArray(filter.and)) { - result.and = filter.and.map((f: any) => this.transformFilter(f)); - } - if ("or" in filter && Array.isArray(filter.or)) { - result.or = filter.or.map((f: any) => this.transformFilter(f)); - } - if ("not" in filter && filter.not) { - result.not = this.transformFilter(filter.not as any); - } - return result; - } - - // Transform field filters - const result: any = {}; - const andConditions: any[] = []; - - for (const [field, value] of Object.entries(filter)) { - // Transform field name to FMFID if using entity IDs - const fieldId = this.occurrence?.baseTable - ? transformFieldName(field, this.occurrence.baseTable) - : field; - - if (Array.isArray(value)) { - // Array of operators - convert to AND conditions - if (value.length === 1) { - // Single operator in array - unwrap it - result[fieldId] = value[0]; - } else { - // Multiple operators - combine with AND - // Create separate conditions for each operator - for (const op of value) { - andConditions.push({ [fieldId]: op }); - } - } - } else if ( - value && - typeof value === "object" && - !(value instanceof Date) && - !Array.isArray(value) - ) { - // Check if it's an operator object (has operator keys like eq, gt, etc.) 
- const operatorKeys = [ - "eq", - "ne", - "gt", - "ge", - "lt", - "le", - "contains", - "startswith", - "endswith", - "in", - ]; - const isOperatorObject = operatorKeys.some((key) => key in value); - - if (isOperatorObject) { - // Single operator object - pass through - result[fieldId] = value; - } else { - // Regular object - might be nested filter, pass through - result[fieldId] = value; - } - } else { - // Primitive value (shorthand) - pass through - result[fieldId] = value; - } - } - - // If we have AND conditions from arrays, combine them - if (andConditions.length > 0) { - if (Object.keys(result).length > 0) { - // We have both regular fields and array-derived AND conditions - // Combine everything with AND - return { and: [...andConditions, result] }; - } else { - // Only array-derived AND conditions - return { and: andConditions }; - } - } - - return result; - } - - filter( - filter: Filter>, - ): QueryBuilder { - // Transform our filter format to odata-query's expected format - this.queryOptions.filter = this.transformFilter(filter) as any; - return this; - } - - orderBy( - orderBy: QueryOptions["orderBy"], - ): QueryBuilder { - // Transform field names to FMFIDs if using entity IDs - if (this.occurrence?.baseTable && orderBy) { - if (Array.isArray(orderBy)) { - this.queryOptions.orderBy = orderBy.map((field) => - transformOrderByField(String(field), this.occurrence!.baseTable), - ); - } else { - this.queryOptions.orderBy = transformOrderByField( - String(orderBy), - this.occurrence.baseTable, - ); - } - } else { - this.queryOptions.orderBy = orderBy; - } - return this; - } - - top( - count: number, - ): QueryBuilder { - this.queryOptions.top = count; - console.log("top method", { - count, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - return this; - } - - skip( - count: number, - ): QueryBuilder { - this.queryOptions.skip = count; - return this; - } - - /** - * Formats select fields for use in query strings. 
- * - Transforms field names to FMFIDs if using entity IDs - * - Wraps "id" fields in double quotes - * - URL-encodes special characters but preserves spaces - */ - private formatSelectFields( - select: QueryOptions["select"], - baseTable?: BaseTable, - ): string { - if (!select) return ""; - const selectFieldsArray = Array.isArray(select) ? select : [select]; - - // Transform to field IDs if using entity IDs - const transformedFields = baseTable - ? transformFieldNamesArray( - selectFieldsArray.map((f) => String(f)), - baseTable, - ) - : selectFieldsArray.map((f) => String(f)); - - return transformedFields - .map((field) => { - if (field === "id") return `"id"`; - const encodedField = encodeURIComponent(String(field)); - return encodedField.replace(/%20/g, " "); - }) - .join(","); - } - - /** - * Builds expand validation configs from internal expand configurations. - * These are used to validate expanded navigation properties. - */ - private buildExpandValidationConfigs( - configs: ExpandConfig[], - ): import("../validation").ExpandValidationConfig[] { - return configs.map((config) => { - // Look up target occurrence from navigation - const targetOccurrence = this.occurrence?.navigation[config.relation]; - const targetSchema = targetOccurrence?.baseTable?.schema; - - // Extract selected fields from options - const selectedFields = config.options?.select - ? Array.isArray(config.options.select) - ? config.options.select.map((f) => String(f)) - : [String(config.options.select)] - : undefined; - - return { - relation: config.relation, - targetSchema: targetSchema, - targetOccurrence: targetOccurrence, - targetBaseTable: targetOccurrence?.baseTable, - occurrence: targetOccurrence, // Add occurrence for transformation - selectedFields: selectedFields, - nestedExpands: undefined, // TODO: Handle nested expands if needed - }; - }); - } - - /** - * Builds OData expand query string from expand configurations. - * Handles nested expands recursively. 
- * Transforms relation names to FMTIDs if using entity IDs. - */ - private buildExpandString(configs: ExpandConfig[]): string { - if (configs.length === 0) { - return ""; - } - - return configs - .map((config) => { - // Get target occurrence for this relation - const targetOccurrence = this.occurrence?.navigation[config.relation]; - - // When using entity IDs, use the target table's FMTID in the expand parameter - // FileMaker expects FMTID in $expand when Prefer header is set - const relationName = - targetOccurrence && targetOccurrence.isUsingTableId() - ? targetOccurrence.getTableId() - : config.relation; - - if (!config.options || Object.keys(config.options).length === 0) { - // Simple expand without options - return relationName; - } - - // Build query options for this expand - const parts: string[] = []; - - if (config.options.select) { - // Pass target base table for field transformation - const selectFields = this.formatSelectFields( - config.options.select, - targetOccurrence?.baseTable, - ); - parts.push(`$select=${selectFields}`); - } - - if (config.options.filter) { - // Filter should already be transformed by the nested builder - // Use odata-query to build filter string - const filterQuery = buildQuery({ filter: config.options.filter }); - const filterMatch = filterQuery.match(/\$filter=([^&]+)/); - if (filterMatch) { - parts.push(`$filter=${filterMatch[1]}`); - } - } - - if (config.options.orderBy) { - // OrderBy should already be transformed by the nested builder - const orderByValue = Array.isArray(config.options.orderBy) - ? 
config.options.orderBy.join(",") - : config.options.orderBy; - parts.push(`$orderby=${String(orderByValue)}`); - } - - if (config.options.top !== undefined) { - parts.push(`$top=${config.options.top}`); - } - - if (config.options.skip !== undefined) { - parts.push(`$skip=${config.options.skip}`); - } - - // Handle nested expands (from expand configs) - if (config.options.expand) { - // If expand is a string, it's already been built - if (typeof config.options.expand === "string") { - parts.push(`$expand=${config.options.expand}`); - } - } - - if (parts.length === 0) { - return relationName; - } - - return `${relationName}(${parts.join(";")})`; - }) - .join(","); - } - - expand< - Rel extends ExtractNavigationNames | (string & {}), - TargetOcc extends FindNavigationTarget = FindNavigationTarget< - Occ, - Rel - >, - TargetSchema extends GetTargetSchemaType = GetTargetSchemaType< - Occ, - Rel - >, - TargetSelected extends keyof TargetSchema = keyof TargetSchema, - >( - relation: Rel, - callback?: ( - builder: QueryBuilder< - TargetSchema, - keyof TargetSchema, - false, - false, - TargetOcc extends TableOccurrence - ? TargetOcc - : undefined - >, - ) => QueryBuilder< - WithSystemFields, - TargetSelected, - any, - any, - any - >, - ): QueryBuilder< - T, - Selected, - SingleMode, - IsCount, - Occ, - Expands & { - [K in Rel]: { schema: TargetSchema; selected: TargetSelected }; - } - > { - // Look up target occurrence from navigation - const targetOccurrence = this.occurrence?.navigation[relation as string]; - - if (callback) { - // Create a new QueryBuilder for the target occurrence - const targetBuilder = new QueryBuilder({ - occurrence: targetOccurrence, - tableName: targetOccurrence?.name ?? 
(relation as string), - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - - // Cast to the expected type for the callback - // At runtime, the builder is untyped (any), but at compile-time we enforce proper types - const typedBuilder = targetBuilder as QueryBuilder< - TargetSchema, - keyof TargetSchema, - false, - false, - TargetOcc extends TableOccurrence - ? TargetOcc - : undefined - >; - - // Pass to callback and get configured builder - const configuredBuilder = callback(typedBuilder); - - // Extract the builder's query options - const expandOptions: Partial> = { - ...configuredBuilder.queryOptions, - }; - - // If the configured builder has nested expands, we need to include them - if (configuredBuilder.expandConfigs.length > 0) { - // Build nested expand string from the configured builder's expand configs - const nestedExpandString = this.buildExpandString( - configuredBuilder.expandConfigs, - ); - if (nestedExpandString) { - // Add nested expand to options - expandOptions.expand = nestedExpandString as any; - } - } - - const expandConfig: ExpandConfig = { - relation: relation as string, - options: expandOptions, - }; - - this.expandConfigs.push(expandConfig); - } else { - // Simple expand without callback - this.expandConfigs.push({ relation: relation as string }); - } - - return this as any; - } - - single(): QueryBuilder { - const newBuilder = new QueryBuilder< - T, - Selected, - "exact", - IsCount, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { ...this.queryOptions }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = "exact"; - newBuilder.isCountMode = this.isCountMode; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - newBuilder.navigateRecordId = 
this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - maybeSingle(): QueryBuilder { - const newBuilder = new QueryBuilder< - T, - Selected, - "maybe", - IsCount, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { ...this.queryOptions }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = "maybe"; - newBuilder.isCountMode = this.isCountMode; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - newBuilder.navigateRecordId = this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - count(): QueryBuilder { - const newBuilder = new QueryBuilder< - T, - Selected, - SingleMode, - true, - Occ, - Expands - >({ - occurrence: this.occurrence, - tableName: this.tableName, - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - newBuilder.queryOptions = { ...this.queryOptions, count: true }; - newBuilder.expandConfigs = [...this.expandConfigs]; - newBuilder.singleMode = this.singleMode; - newBuilder.isCountMode = true as true; - // Preserve navigation metadata - newBuilder.isNavigate = this.isNavigate; - newBuilder.navigateRecordId = this.navigateRecordId; - newBuilder.navigateRelation = this.navigateRelation; - newBuilder.navigateSourceTableName = this.navigateSourceTableName; - newBuilder.navigateBaseRelation = this.navigateBaseRelation; - return newBuilder; - } - - async execute( - options?: RequestInit & FFetchOptions & EO, 
- ): Promise< - Result< - IsCount extends true - ? number - : SingleMode extends "exact" - ? ConditionallyWithODataAnnotations< - Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }, - EO["includeODataAnnotations"] extends true ? true : false - > - : SingleMode extends "maybe" - ? ConditionallyWithODataAnnotations< - Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }, - EO["includeODataAnnotations"] extends true ? true : false - > | null - : ConditionallyWithODataAnnotations< - Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }, - EO["includeODataAnnotations"] extends true ? true : false - >[] - > - > { - // Build query without expand (we'll add it manually) - const queryOptionsWithoutExpand = { ...this.queryOptions }; - delete queryOptionsWithoutExpand.expand; - - const mergedOptions = this.mergeExecuteOptions(options); - - // Format select fields before building query - if (queryOptionsWithoutExpand.select) { - queryOptionsWithoutExpand.select = this.formatSelectFields( - queryOptionsWithoutExpand.select, - this.occurrence?.baseTable, - ) as any; - } - - let queryString = buildQuery(queryOptionsWithoutExpand); - - // Build custom expand string - const expandString = this.buildExpandString(this.expandConfigs); - if (expandString) { - const separator = queryString.includes("?") ? 
"&" : "?"; - queryString = `${queryString}${separator}$expand=${expandString}`; - } - - // Handle navigation from RecordBuilder - if ( - this.isNavigate && - this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - let url: string; - if (this.navigateBaseRelation) { - // Navigation from a navigated EntitySet: /sourceTable/baseRelation('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateBaseRelation}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } else { - // Normal navigation: /sourceTable('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } - const result = await this.context._makeRequest(url, mergedOptions); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - let response = result.data; - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = mergedOptions.useEntityIds ?? false; - - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - response = transformResponseFields( - response, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = response as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? 
"one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - const stripped = this.stripODataAnnotationsIfNeeded(record, options); - return { data: stripped as any, error: undefined }; - } else { - const records = resp.value ?? []; - const stripped = records.map((record: any) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { data: stripped as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as - | (keyof T)[] - | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - const validation = await validateSingleResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - const stripped = validation.data - ? 
this.stripODataAnnotationsIfNeeded(validation.data, options) - : null; - return { data: stripped as any, error: undefined }; - } else { - const validation = await validateListResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - const stripped = validation.data.map((record) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { data: stripped as any, error: undefined }; - } - } - - // Handle navigation from EntitySet (without record ID) - if ( - this.isNavigate && - !this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - const result = await this.context._makeRequest( - `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateRelation}${queryString}`, - mergedOptions, - ); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - let response = result.data; - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = mergedOptions.useEntityIds ?? false; - - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - response = transformResponseFields( - response, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = response as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? 
"one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - const stripped = this.stripODataAnnotationsIfNeeded(record, options); - return { data: stripped as any, error: undefined }; - } else { - const records = resp.value ?? []; - const stripped = records.map((record: any) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { data: stripped as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as - | (keyof T)[] - | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - const validation = await validateSingleResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - const stripped = validation.data - ? 
this.stripODataAnnotationsIfNeeded(validation.data, options) - : null; - return { data: stripped as any, error: undefined }; - } else { - const validation = await validateListResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - const stripped = validation.data.map((record) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { data: stripped as any, error: undefined }; - } - } - - // Handle $count endpoint - if (this.isCountMode) { - const tableId = this.getTableId(mergedOptions.useEntityIds); - const result = await this.context._makeRequest( - `/${this.databaseName}/${tableId}/$count${queryString}`, - mergedOptions, - ); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - // OData returns count as a string, convert to number - const count = - typeof result.data === "string" ? Number(result.data) : result.data; - return { data: count as number, error: undefined } as any; - } - - const tableId = this.getTableId(mergedOptions.useEntityIds); - const result = await this.context._makeRequest( - `/${this.databaseName}/${tableId}${queryString}`, - mergedOptions, - ); - - if (result.error) { - return { data: undefined, error: result.error }; - } - - let response = result.data; - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = mergedOptions.useEntityIds ?? 
false; - - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - response = transformResponseFields( - response, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = response as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? "one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - const stripped = this.stripODataAnnotationsIfNeeded(record, options); - return { data: stripped as any, error: undefined }; - } else { - // Handle list response structure - const records = resp.value ?? []; - const stripped = records.map((record: any) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { data: stripped as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as (keyof T)[] | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - const validation = await validateSingleResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - const stripped = validation.data - ? 
this.stripODataAnnotationsIfNeeded(validation.data, options) - : null; - return { - data: stripped as any, - error: undefined, - }; - } else { - const validation = await validateListResponse( - response, - schema, - selectedFields, - expandValidationConfigs, - ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - const stripped = validation.data.map((record) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { - data: stripped as any, - error: undefined, - }; - } - } - - getQueryString(): string { - // Build query without expand (we'll add it manually) - const queryOptionsWithoutExpand = { ...this.queryOptions }; - delete queryOptionsWithoutExpand.expand; - - // Format select fields before building query - buildQuery treats & as separator, - // so we need to pre-encode special characters. buildQuery preserves encoded values. - if (queryOptionsWithoutExpand.select) { - queryOptionsWithoutExpand.select = this.formatSelectFields( - queryOptionsWithoutExpand.select, - ) as any; - } - - let queryParams = buildQuery(queryOptionsWithoutExpand); - - // Post-process: buildQuery encodes spaces as %20, but we want to preserve spaces - // Replace %20 with spaces in the $select part - if (this.queryOptions.select) { - queryParams = queryParams.replace( - /\$select=([^&]*)/, - (match, selectValue) => { - return `$select=${selectValue.replace(/%20/g, " ")}`; - }, - ); - } - const expandString = this.buildExpandString(this.expandConfigs); - if (expandString) { - const separator = queryParams.includes("?") ? 
"&" : "?"; - queryParams = `${queryParams}${separator}$expand=${expandString}`; - } - - // Handle navigation from RecordBuilder (with record ID) - if ( - this.isNavigate && - this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - let path: string; - if (this.navigateBaseRelation) { - // Navigation from a navigated EntitySet: /sourceTable/baseRelation('recordId')/relation - path = `/${this.navigateSourceTableName}/${this.navigateBaseRelation}('${this.navigateRecordId}')/${this.navigateRelation}`; - } else { - // Normal navigation: /sourceTableName('recordId')/relationName - path = `/${this.navigateSourceTableName}('${this.navigateRecordId}')/${this.navigateRelation}`; - } - // Append query params if any exist - return queryParams ? `${path}${queryParams}` : path; - } - - // Handle navigation from EntitySet (without record ID) - if ( - this.isNavigate && - !this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - // Return the path portion: /sourceTableName/relationName - const path = `/${this.navigateSourceTableName}/${this.navigateRelation}`; - // Append query params if any exist - return queryParams ? 
`${path}${queryParams}` : path; - } - - // Default case: return table name with query params - return `/${this.tableName}${queryParams}`; - } - - getRequestConfig(): { method: string; url: string; body?: any } { - // Build query without expand (we'll add it manually) - const queryOptionsWithoutExpand = { ...this.queryOptions }; - delete queryOptionsWithoutExpand.expand; - - // Format select fields before building query - if (queryOptionsWithoutExpand.select) { - queryOptionsWithoutExpand.select = this.formatSelectFields( - queryOptionsWithoutExpand.select, - ) as any; - } - - let queryString = buildQuery(queryOptionsWithoutExpand); - - // Build custom expand string - const expandString = this.buildExpandString(this.expandConfigs); - if (expandString) { - const separator = queryString.includes("?") ? "&" : "?"; - queryString = `${queryString}${separator}$expand=${expandString}`; - } - - let url: string; - - // Handle navigation from RecordBuilder (with record ID) - if ( - this.isNavigate && - this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - if (this.navigateBaseRelation) { - // Navigation from a navigated EntitySet: /sourceTable/baseRelation('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateBaseRelation}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } else { - // Normal navigation: /sourceTable('recordId')/relation - url = `/${this.databaseName}/${this.navigateSourceTableName}('${this.navigateRecordId}')/${this.navigateRelation}${queryString}`; - } - } else if ( - this.isNavigate && - !this.navigateRecordId && - this.navigateRelation && - this.navigateSourceTableName - ) { - // Handle navigation from EntitySet (without record ID) - url = `/${this.databaseName}/${this.navigateSourceTableName}/${this.navigateRelation}${queryString}`; - } else if (this.isCountMode) { - url = `/${this.databaseName}/${this.tableName}/$count${queryString}`; - } else { - url 
= `/${this.databaseName}/${this.tableName}${queryString}`; - } - - return { - method: "GET", - url, - }; - } - - toRequest(baseUrl: string): Request { - const config = this.getRequestConfig(); - const fullUrl = `${baseUrl}${config.url}`; - - return new Request(fullUrl, { - method: config.method, - headers: { - "Content-Type": "application/json", - Accept: "application/json", - }, - }); - } - - async processResponse( - response: Response, - options?: ExecuteOptions, - ): Promise< - Result> - > { - // Handle 204 No Content (shouldn't happen for queries, but handle it gracefully) - if (response.status === 204) { - // Return empty list for list queries, null for single queries - if (this.singleMode !== false) { - if (this.singleMode === "maybe") { - return { data: null as any, error: undefined }; - } - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: [] as any, error: undefined }; - } - - // Parse the response body - let rawData; - try { - rawData = await response.json(); - } catch (err) { - // Check if it's an empty body error (common with 204 responses) - if (err instanceof SyntaxError && response.status === 204) { - // Handled above, but just in case - return { data: [] as any, error: undefined }; - } - return { - data: undefined, - error: { - name: "ResponseParseError", - message: `Failed to parse response JSON: ${err instanceof Error ? err.message : "Unknown error"}`, - timestamp: new Date(), - } as any, - }; - } - - if (!rawData) { - return { - data: undefined, - error: { - name: "ResponseError", - message: "Response body was empty or null", - timestamp: new Date(), - } as any, - }; - } - - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = options?.useEntityIds ?? 
this.databaseUseEntityIds; - - let transformedData = rawData; - if (this.occurrence?.baseTable && shouldUseIds) { - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - transformedData = transformResponseFields( - rawData, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Skip validation if requested - if (options?.skipValidation === true) { - const resp = transformedData as any; - if (this.singleMode !== false) { - const records = resp.value ?? [resp]; - const count = Array.isArray(records) ? records.length : 1; - - if (count > 1) { - return { - data: undefined, - error: new RecordCountMismatchError( - this.singleMode === "exact" ? "one" : "at-most-one", - count, - ), - }; - } - - if (count === 0) { - if (this.singleMode === "exact") { - return { - data: undefined, - error: new RecordCountMismatchError("one", 0), - }; - } - return { data: null as any, error: undefined }; - } - - const record = Array.isArray(records) ? records[0] : records; - const stripped = this.stripODataAnnotationsIfNeeded(record, options); - return { data: stripped as any, error: undefined }; - } else { - // Handle list response structure - const records = resp.value ?? 
[]; - const stripped = records.map((record: any) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { data: stripped as any, error: undefined }; - } - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - const selectedFields = this.queryOptions.select as (keyof T)[] | undefined; - const expandValidationConfigs = this.buildExpandValidationConfigs( - this.expandConfigs, - ); - - if (this.singleMode !== false) { - // Single mode (one() or oneOrNull()) - const validation = await validateSingleResponse( - transformedData, - schema, - selectedFields, - expandValidationConfigs, - this.singleMode, - ); - - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - - if (validation.data === null) { - return { data: null as any, error: undefined }; - } - - const stripped = this.stripODataAnnotationsIfNeeded( - validation.data, - options, - ); - return { data: stripped as any, error: undefined }; - } - - // List mode - const validation = await validateListResponse( - transformedData, - schema, - selectedFields, - expandValidationConfigs, - ); - - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - - const stripped = validation.data.map((record) => - this.stripODataAnnotationsIfNeeded(record, options), - ); - return { data: stripped as any, error: undefined }; - } -} diff --git a/packages/fmodata/src/client/query/expand-builder.ts b/packages/fmodata/src/client/query/expand-builder.ts new file mode 100644 index 00000000..0e459e3d --- /dev/null +++ b/packages/fmodata/src/client/query/expand-builder.ts @@ -0,0 +1,164 @@ +import { QueryOptions } from "odata-query"; +import buildQuery from "odata-query"; +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { FMTable } from "../../orm/table"; +import type { ExpandValidationConfig } from "../../validation"; +import { formatSelectFields } from "../builders/select-utils"; + +/** + * 
Internal type for expand configuration + */ +export type ExpandConfig = { + relation: string; + options?: Partial>; + targetTable?: FMTable; +}; + +/** + * Builds OData expand query strings and validation configs. + * Handles nested expands recursively and transforms relation names to FMTIDs + * when using entity IDs. + */ +export class ExpandBuilder { + constructor(private useEntityIds: boolean) {} + + /** + * Builds OData expand query string from expand configurations. + * Handles nested expands recursively. + * Transforms relation names to FMTIDs if using entity IDs. + */ + buildExpandString(configs: ExpandConfig[]): string { + if (configs.length === 0) { + return ""; + } + + return configs.map((config) => this.buildSingleExpand(config)).join(","); + } + + /** + * Builds a single expand string with its options. + */ + private buildSingleExpand(config: ExpandConfig): string { + // Get target table/occurrence from config (stored during expand call) + const targetTable = config.targetTable; + + // When using entity IDs, use the target table's FMTID in the expand parameter + // FileMaker expects FMTID in $expand when Prefer header is set + // Only use FMTID if databaseUseEntityIds is enabled + let relationName = config.relation; + if (this.useEntityIds) { + if (targetTable && FMTable.Symbol.EntityId in targetTable) { + const tableId = (targetTable as any)[FMTable.Symbol.EntityId] as + | `FMTID:${string}` + | undefined; + if (tableId) { + relationName = tableId; + } + } + } + + if (!config.options || Object.keys(config.options).length === 0) { + // Simple expand without options + return relationName; + } + + // Build query options for this expand + const parts: string[] = []; + + if (config.options.select) { + // Use shared formatSelectFields function for consistent id field quoting + const selectArray = Array.isArray(config.options.select) + ? 
config.options.select.map(String) + : [String(config.options.select)]; + const selectFields = formatSelectFields( + selectArray, + targetTable, + this.useEntityIds, + ); + parts.push(`$select=${selectFields}`); + } + + if (config.options.filter) { + // Filter should already be transformed by the nested builder + // Use odata-query to build filter string + const filterQuery = buildQuery({ filter: config.options.filter }); + const filterMatch = filterQuery.match(/\$filter=([^&]+)/); + if (filterMatch) { + parts.push(`$filter=${filterMatch[1]}`); + } + } + + if (config.options.orderBy) { + // OrderBy should already be transformed by the nested builder + const orderByValue = Array.isArray(config.options.orderBy) + ? config.options.orderBy.join(",") + : config.options.orderBy; + parts.push(`$orderby=${String(orderByValue)}`); + } + + if (config.options.top !== undefined) { + parts.push(`$top=${config.options.top}`); + } + + if (config.options.skip !== undefined) { + parts.push(`$skip=${config.options.skip}`); + } + + // Handle nested expands (from expand configs) + if (config.options.expand) { + // If expand is a string, it's already been built + if (typeof config.options.expand === "string") { + parts.push(`$expand=${config.options.expand}`); + } + } + + if (parts.length === 0) { + return relationName; + } + + return `${relationName}(${parts.join(";")})`; + } + + /** + * Builds expand validation configs from internal expand configurations. + * These are used to validate expanded navigation properties. 
+ */ + buildValidationConfigs(configs: ExpandConfig[]): ExpandValidationConfig[] { + return configs.map((config) => { + // Get target table/occurrence from config (stored during expand call) + const targetTable = config.targetTable; + + // Extract schema from target table/occurrence + let targetSchema: Record | undefined; + if (targetTable) { + const tableSchema = (targetTable as any)[FMTable.Symbol.Schema]; + if (tableSchema) { + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + targetSchema = zodSchema.shape as Record; + } + } + } + + // Extract selected fields from options + const selectedFields = config.options?.select + ? Array.isArray(config.options.select) + ? config.options.select.map((f) => String(f)) + : [String(config.options.select)] + : undefined; + + return { + relation: config.relation, + targetSchema: targetSchema, + targetTable: targetTable, + table: targetTable, // For transformation + selectedFields: selectedFields, + nestedExpands: undefined, // TODO: Handle nested expands if needed + }; + }); + } +} diff --git a/packages/fmodata/src/client/query/index.ts b/packages/fmodata/src/client/query/index.ts new file mode 100644 index 00000000..094a4d89 --- /dev/null +++ b/packages/fmodata/src/client/query/index.ts @@ -0,0 +1,13 @@ +// Re-export QueryBuilder as the main export +export { QueryBuilder } from "./query-builder"; + +// Export types +export type { + TypeSafeOrderBy, + ExpandedRelations, + QueryReturnType, +} from "./query-builder"; + +// Export ExpandConfig from expand-builder +export type { ExpandConfig } from "./expand-builder"; + diff --git a/packages/fmodata/src/client/query/query-builder.ts b/packages/fmodata/src/client/query/query-builder.ts new file mode 100644 index 00000000..90b12d61 --- /dev/null +++ b/packages/fmodata/src/client/query/query-builder.ts @@ -0,0 +1,742 @@ +import { QueryOptions } from "odata-query"; +import buildQuery from 
"odata-query"; +import type { + ExecutionContext, + ExecutableBuilder, + Result, + ExecuteOptions, + ConditionallyWithODataAnnotations, + ExtractSchemaFromOccurrence, + ExecuteMethodOptions, +} from "../../types"; +import { RecordCountMismatchError } from "../../errors"; +import { type FFetchOptions } from "@fetchkit/ffetch"; +import { + transformFieldNamesArray, + transformOrderByField, +} from "../../transform"; +import { safeJsonParse } from "../sanitize-json"; +import { parseErrorResponse } from "../error-parser"; +import { isColumn, type Column } from "../../orm/column"; +import { + FilterExpression, + OrderByExpression, + isOrderByExpression, +} from "../../orm/operators"; +import { + FMTable, + type InferSchemaOutputFromFMTable, + type ValidExpandTarget, + type ExtractTableName, + type ValidateNoContainerFields, + getTableName, +} from "../../orm/table"; +import { + ExpandBuilder, + type ExpandConfig, + type ExpandedRelations, + resolveTableId, + mergeExecuteOptions, + formatSelectFields, + processQueryResponse, + processSelectWithRenames, + buildSelectExpandQueryString, + createODataRequest, +} from "../builders/index"; +import { QueryUrlBuilder, type NavigationConfig } from "./url-builder"; +import type { TypeSafeOrderBy, QueryReturnType } from "./types"; +import { createLogger, InternalLogger } from "../../logger"; + +// Re-export QueryReturnType for backward compatibility +export type { QueryReturnType }; + +/** + * Default maximum number of records to return in a list query. + * This prevents stack overflow issues with large datasets while still + * allowing substantial data retrieval. Users can override with .top(). 
+ */ +const DEFAULT_TOP = 1000; + +export type { TypeSafeOrderBy, ExpandedRelations }; + +export class QueryBuilder< + Occ extends FMTable, + Selected extends + | keyof InferSchemaOutputFromFMTable + | Record< + string, + Column> + > = keyof InferSchemaOutputFromFMTable, + SingleMode extends "exact" | "maybe" | false = false, + IsCount extends boolean = false, + Expands extends ExpandedRelations = {}, +> implements + ExecutableBuilder< + QueryReturnType< + InferSchemaOutputFromFMTable, + Selected, + SingleMode, + IsCount, + Expands + > + > +{ + private queryOptions: Partial< + QueryOptions> + > = {}; + private expandConfigs: ExpandConfig[] = []; + private singleMode: SingleMode = false as SingleMode; + private isCountMode = false as IsCount; + private occurrence: Occ; + private databaseName: string; + private context: ExecutionContext; + private navigation?: NavigationConfig; + private databaseUseEntityIds: boolean; + private expandBuilder: ExpandBuilder; + private urlBuilder: QueryUrlBuilder; + // Mapping from field names to output keys (for renamed fields in select) + private fieldMapping?: Record; + private logger: InternalLogger; + + constructor(config: { + occurrence: Occ; + databaseName: string; + context: ExecutionContext; + databaseUseEntityIds?: boolean; + }) { + this.occurrence = config.occurrence; + this.databaseName = config.databaseName; + this.context = config.context; + this.logger = config.context?._getLogger?.() ?? createLogger(); + this.databaseUseEntityIds = config.databaseUseEntityIds ?? 
false; + this.expandBuilder = new ExpandBuilder( + this.databaseUseEntityIds, + this.logger, + ); + this.urlBuilder = new QueryUrlBuilder( + this.databaseName, + this.occurrence, + this.context, + ); + } + + /** + * Helper to merge database-level useEntityIds with per-request options + */ + private mergeExecuteOptions( + options?: RequestInit & FFetchOptions & ExecuteOptions, + ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { + return mergeExecuteOptions(options, this.databaseUseEntityIds); + } + + /** + * Gets the FMTable instance + */ + private getTable(): FMTable | undefined { + return this.occurrence; + } + + /** + * Gets the table ID (FMTID) if using entity IDs, otherwise returns the table name + * @param useEntityIds - Optional override for entity ID usage + */ + private getTableIdOrName(useEntityIds?: boolean): string { + return resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + useEntityIds, + ); + } + + /** + * Creates a new QueryBuilder with modified configuration. + * Used by single(), maybeSingle(), count(), and select() to create new instances. + */ + private cloneWithChanges< + NewSelected extends + | keyof InferSchemaOutputFromFMTable + | Record>> = Selected, + NewSingle extends "exact" | "maybe" | false = SingleMode, + NewCount extends boolean = IsCount, + >(changes: { + selectedFields?: NewSelected; + singleMode?: NewSingle; + isCountMode?: NewCount; + queryOptions?: Partial>>; + fieldMapping?: Record; + }): QueryBuilder { + const newBuilder = new QueryBuilder< + Occ, + NewSelected, + NewSingle, + NewCount, + Expands + >({ + occurrence: this.occurrence, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }); + newBuilder.queryOptions = { + ...this.queryOptions, + ...changes.queryOptions, + }; + newBuilder.expandConfigs = [...this.expandConfigs]; + newBuilder.singleMode = (changes.singleMode ?? 
this.singleMode) as any; + newBuilder.isCountMode = (changes.isCountMode ?? this.isCountMode) as any; + newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping; + // Copy navigation metadata + newBuilder.navigation = this.navigation; + newBuilder.urlBuilder = new QueryUrlBuilder( + this.databaseName, + this.occurrence, + this.context, + ); + return newBuilder; + } + + /** + * Select fields using column references. + * Allows renaming fields by using different keys in the object. + * Container fields cannot be selected and will cause a type error. + * + * @example + * db.from(users).list().select({ + * name: users.name, + * userEmail: users.email // renamed! + * }) + * + * @param fields - Object mapping output keys to column references (container fields excluded) + * @returns QueryBuilder with updated selected fields + */ + select< + TSelect extends Record< + string, + Column, false> + >, + >(fields: TSelect): QueryBuilder { + const tableName = getTableName(this.occurrence); + const { selectedFields, fieldMapping } = processSelectWithRenames( + fields, + tableName, + this.logger, + ); + + return this.cloneWithChanges({ + selectedFields: fields as any, + queryOptions: { + select: selectedFields, + }, + fieldMapping: + Object.keys(fieldMapping).length > 0 ? fieldMapping : undefined, + }); + } + + /** + * Filter results using operator expressions (new ORM-style API). + * Supports eq, gt, lt, and, or, etc. operators with Column references. + * Also supports raw OData filter strings as an escape hatch. 
+ * + * @example + * .where(eq(users.hobby, "reading")) + * .where(and(eq(users.active, true), gt(users.age, 18))) + * .where("status eq 'active'") // Raw OData string escape hatch + */ + where( + expression: FilterExpression | string, + ): QueryBuilder { + // Handle raw string filters (escape hatch) + if (typeof expression === "string") { + this.queryOptions.filter = expression; + return this; + } + // Convert FilterExpression to OData filter string + const filterString = expression.toODataFilter(this.databaseUseEntityIds); + this.queryOptions.filter = filterString; + return this; + } + + /** + * Specify the sort order for query results. + * + * @example Single field (ascending by default) + * ```ts + * .orderBy("name") + * .orderBy(users.name) // Column reference + * .orderBy(asc(users.name)) // Explicit ascending + * ``` + * + * @example Single field with explicit direction + * ```ts + * .orderBy(["name", "desc"]) + * .orderBy([users.name, "desc"]) // Column reference + * .orderBy(desc(users.name)) // Explicit descending + * ``` + * + * @example Multiple fields with directions + * ```ts + * .orderBy([["name", "asc"], ["createdAt", "desc"]]) + * .orderBy([[users.name, "asc"], [users.createdAt, "desc"]]) // Column references + * .orderBy(users.name, desc(users.age)) // Variadic with helpers + * ``` + */ + orderBy( + ...orderByArgs: + | [ + | TypeSafeOrderBy> + | Column> + | OrderByExpression>, + ] + | [ + Column>, + ...Array< + | Column> + | OrderByExpression> + >, + ] + ): QueryBuilder { + const tableName = getTableName(this.occurrence); + + // Handle variadic arguments (multiple fields) + if (orderByArgs.length > 1) { + const orderByParts = orderByArgs.map((arg) => { + if (isOrderByExpression(arg)) { + // Validate table match + if (arg.column.tableName !== tableName) { + this.logger.warn( + `Column ${arg.column.toString()} is from table "${arg.column.tableName}", but query is for table "${tableName}"`, + ); + } + const fieldName = arg.column.fieldName; + const 
transformedField = this.occurrence + ? transformOrderByField(fieldName, this.occurrence) + : fieldName; + return `${transformedField} ${arg.direction}`; + } else if (isColumn(arg)) { + // Validate table match + if (arg.tableName !== tableName) { + this.logger.warn( + `Column ${arg.toString()} is from table "${arg.tableName}", but query is for table "${tableName}"`, + ); + } + const fieldName = arg.fieldName; + const transformedField = this.occurrence + ? transformOrderByField(fieldName, this.occurrence) + : fieldName; + return transformedField; // Default to ascending + } else { + throw new Error( + "Variadic orderBy() only accepts Column or OrderByExpression arguments", + ); + } + }); + this.queryOptions.orderBy = orderByParts; + return this; + } + + // Handle single argument + const orderBy = orderByArgs[0]; + + // Handle OrderByExpression + if (isOrderByExpression(orderBy)) { + // Validate table match + if (orderBy.column.tableName !== tableName) { + this.logger.warn( + `Column ${orderBy.column.toString()} is from table "${orderBy.column.tableName}", but query is for table "${tableName}"`, + ); + } + const fieldName = orderBy.column.fieldName; + const transformedField = this.occurrence + ? transformOrderByField(fieldName, this.occurrence) + : fieldName; + this.queryOptions.orderBy = `${transformedField} ${orderBy.direction}`; + return this; + } + + // Handle Column references + if (isColumn(orderBy)) { + // Validate table match + if (orderBy.tableName !== tableName) { + this.logger.warn( + `Column ${orderBy.toString()} is from table "${orderBy.tableName}", but query is for table "${tableName}"`, + ); + } + // Single Column reference without direction (defaults to ascending) + const fieldName = orderBy.fieldName; + this.queryOptions.orderBy = this.occurrence + ? 
transformOrderByField(fieldName, this.occurrence) + : fieldName; + return this; + } + // Transform field names to FMFIDs if using entity IDs + if (this.occurrence && orderBy) { + if (Array.isArray(orderBy)) { + // Check if it's a single tuple [field, direction] or array of tuples + if ( + orderBy.length === 2 && + (typeof orderBy[0] === "string" || isColumn(orderBy[0])) && + (orderBy[1] === "asc" || orderBy[1] === "desc") + ) { + // Single tuple: [field, direction] or [column, direction] + const field = isColumn(orderBy[0]) + ? orderBy[0].fieldName + : orderBy[0]; + const direction = orderBy[1] as "asc" | "desc"; + this.queryOptions.orderBy = `${transformOrderByField(field, this.occurrence)} ${direction}`; + } else { + // Array of tuples: [[field, dir], [field, dir], ...] + this.queryOptions.orderBy = ( + orderBy as Array<[any, "asc" | "desc"]> + ).map(([fieldOrCol, direction]) => { + const field = isColumn(fieldOrCol) + ? fieldOrCol.fieldName + : String(fieldOrCol); + const transformedField = transformOrderByField( + field, + this.occurrence!, + ); + return `${transformedField} ${direction}`; + }); + } + } else { + // Single field name (string) + this.queryOptions.orderBy = transformOrderByField( + String(orderBy), + this.occurrence, + ); + } + } else { + // No occurrence/baseTable - pass through as-is + if (Array.isArray(orderBy)) { + if ( + orderBy.length === 2 && + (typeof orderBy[0] === "string" || isColumn(orderBy[0])) && + (orderBy[1] === "asc" || orderBy[1] === "desc") + ) { + // Single tuple: [field, direction] or [column, direction] + const field = isColumn(orderBy[0]) + ? orderBy[0].fieldName + : orderBy[0]; + const direction = orderBy[1] as "asc" | "desc"; + this.queryOptions.orderBy = `${field} ${direction}`; + } else { + // Array of tuples + this.queryOptions.orderBy = ( + orderBy as Array<[any, "asc" | "desc"]> + ).map(([fieldOrCol, direction]) => { + const field = isColumn(fieldOrCol) + ? 
fieldOrCol.fieldName + : String(fieldOrCol); + return `${field} ${direction}`; + }); + } + } else { + this.queryOptions.orderBy = orderBy; + } + } + return this; + } + + top( + count: number, + ): QueryBuilder { + this.queryOptions.top = count; + return this; + } + + skip( + count: number, + ): QueryBuilder { + this.queryOptions.skip = count; + return this; + } + + expand< + TargetTable extends FMTable, + TSelected extends + | keyof InferSchemaOutputFromFMTable + | Record< + string, + Column> + > = keyof InferSchemaOutputFromFMTable, + TNestedExpands extends ExpandedRelations = {}, + >( + targetTable: ValidExpandTarget, + callback?: ( + builder: QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >, + ) => QueryBuilder, + ): QueryBuilder< + Occ, + Selected, + SingleMode, + IsCount, + Expands & { + [K in ExtractTableName]: { + schema: InferSchemaOutputFromFMTable; + selected: TSelected; + nested: TNestedExpands; + }; + } + > { + // Use ExpandBuilder.processExpand to handle the expand logic + type TargetBuilder = QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; + const expandConfig = this.expandBuilder.processExpand< + TargetTable, + TargetBuilder + >( + targetTable, + this.occurrence, + callback as ((builder: TargetBuilder) => TargetBuilder) | undefined, + () => + new QueryBuilder({ + occurrence: targetTable, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }), + ); + + this.expandConfigs.push(expandConfig); + return this as any; + } + + single(): QueryBuilder { + return this.cloneWithChanges({ singleMode: "exact" as const }); + } + + maybeSingle(): QueryBuilder { + return this.cloneWithChanges({ singleMode: "maybe" as const }); + } + + count(): QueryBuilder { + return this.cloneWithChanges({ + isCountMode: true as const, + queryOptions: { count: true }, + }); + } + + /** + * Builds the OData query string from current 
query options and expand configs. + */ + private buildQueryString(): string { + // Build query without expand and select (we'll add them manually if using entity IDs) + const queryOptionsWithoutExpandAndSelect = { ...this.queryOptions }; + const originalSelect = queryOptionsWithoutExpandAndSelect.select; + delete queryOptionsWithoutExpandAndSelect.expand; + delete queryOptionsWithoutExpandAndSelect.select; + + let queryString = buildQuery(queryOptionsWithoutExpandAndSelect); + + // Use shared helper for select/expand portion + const selectArray = originalSelect + ? Array.isArray(originalSelect) + ? originalSelect.map(String) + : [String(originalSelect)] + : undefined; + + const selectExpandString = buildSelectExpandQueryString({ + selectedFields: selectArray, + expandConfigs: this.expandConfigs, + table: this.occurrence, + useEntityIds: this.databaseUseEntityIds, + logger: this.logger, + }); + + // Append select/expand to existing query string + if (selectExpandString) { + // Strip leading ? from helper result and append with appropriate separator + const params = selectExpandString.startsWith("?") + ? selectExpandString.slice(1) + : selectExpandString; + const separator = queryString.includes("?") ? "&" : "?"; + queryString = `${queryString}${separator}${params}`; + } + + return queryString; + } + + async execute( + options?: ExecuteMethodOptions, + ): Promise< + Result< + ConditionallyWithODataAnnotations< + QueryReturnType< + InferSchemaOutputFromFMTable, + Selected, + SingleMode, + IsCount, + Expands + >, + EO["includeODataAnnotations"] extends true ? 
true : false + > + > + > { + const mergedOptions = this.mergeExecuteOptions(options); + const queryString = this.buildQueryString(); + + // Handle $count endpoint + if (this.isCountMode) { + const url = this.urlBuilder.build(queryString, { + isCount: true, + useEntityIds: mergedOptions.useEntityIds, + navigation: this.navigation, + }); + const result = await this.context._makeRequest(url, mergedOptions); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + // OData returns count as a string, convert to number + const count = + typeof result.data === "string" ? Number(result.data) : result.data; + return { data: count as number, error: undefined } as any; + } + + const url = this.urlBuilder.build(queryString, { + isCount: this.isCountMode, + useEntityIds: mergedOptions.useEntityIds, + navigation: this.navigation, + }); + + const result = await this.context._makeRequest(url, mergedOptions); + + if (result.error) { + return { data: undefined, error: result.error }; + } + + return processQueryResponse(result.data, { + occurrence: this.occurrence, + singleMode: this.singleMode, + queryOptions: this.queryOptions as any, + expandConfigs: this.expandConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + logger: this.logger, + }); + } + + getQueryString(): string { + const queryString = this.buildQueryString(); + return this.urlBuilder.buildPath(queryString, { + useEntityIds: this.databaseUseEntityIds, + navigation: this.navigation, + }); + } + + getRequestConfig(): { method: string; url: string; body?: any } { + const queryString = this.buildQueryString(); + const url = this.urlBuilder.build(queryString, { + isCount: this.isCountMode, + useEntityIds: this.databaseUseEntityIds, + navigation: this.navigation, + }); + + return { + method: "GET", + url, + }; + } + + toRequest(baseUrl: string, options?: ExecuteOptions): Request { + const config = this.getRequestConfig(); 
+ return createODataRequest(baseUrl, config, options); + } + + async processResponse( + response: Response, + options?: ExecuteOptions, + ): Promise< + Result< + QueryReturnType< + InferSchemaOutputFromFMTable, + Selected, + SingleMode, + IsCount, + Expands + > + > + > { + // Check for error responses (important for batch operations) + if (!response.ok) { + const error = await parseErrorResponse( + response, + response.url || + `/${this.databaseName}/${getTableName(this.occurrence)}`, + ); + return { data: undefined, error }; + } + + // Handle 204 No Content (shouldn't happen for queries, but handle it gracefully) + if (response.status === 204) { + // Return empty list for list queries, null for single queries + if (this.singleMode !== false) { + if (this.singleMode === "maybe") { + return { data: null as any, error: undefined }; + } + return { + data: undefined, + error: new RecordCountMismatchError("one", 0), + }; + } + return { data: [] as any, error: undefined }; + } + + // Parse the response body (using safeJsonParse to handle FileMaker's invalid JSON with unquoted ? values) + let rawData; + try { + rawData = await safeJsonParse(response); + } catch (err) { + // Check if it's an empty body error (common with 204 responses) + if (err instanceof SyntaxError && response.status === 204) { + // Handled above, but just in case + return { data: [] as any, error: undefined }; + } + return { + data: undefined, + error: { + name: "ResponseParseError", + message: `Failed to parse response JSON: ${err instanceof Error ? 
err.message : "Unknown error"}`, + timestamp: new Date(), + } as any, + }; + } + + if (!rawData) { + return { + data: undefined, + error: { + name: "ResponseError", + message: "Response body was empty or null", + timestamp: new Date(), + } as any, + }; + } + + const mergedOptions = this.mergeExecuteOptions(options); + return processQueryResponse(rawData, { + occurrence: this.occurrence, + singleMode: this.singleMode, + queryOptions: this.queryOptions as any, + expandConfigs: this.expandConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + logger: this.logger, + }); + } +} diff --git a/packages/fmodata/src/client/query/response-processor.ts b/packages/fmodata/src/client/query/response-processor.ts new file mode 100644 index 00000000..c3140601 --- /dev/null +++ b/packages/fmodata/src/client/query/response-processor.ts @@ -0,0 +1,246 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import type { QueryOptions } from "odata-query"; +import type { FMTable } from "../../orm/table"; +import type { Result } from "../../types"; +import { RecordCountMismatchError } from "../../errors"; +import { transformResponseFields } from "../../transform"; +import { validateListResponse, validateSingleResponse } from "../../validation"; +import type { ExpandValidationConfig } from "../../validation"; +import type { ExpandConfig } from "./expand-builder"; +import { FMTable as FMTableClass } from "../../orm/table"; +import { InternalLogger } from "../../logger"; + +/** + * Configuration for processing query responses + */ +export interface ProcessQueryResponseConfig { + occurrence?: FMTable; + singleMode: "exact" | "maybe" | false; + queryOptions: Partial>; + expandConfigs: ExpandConfig[]; + skipValidation?: boolean; + useEntityIds?: boolean; + // Mapping from field names to output keys (for renamed fields in select) + fieldMapping?: Record; + logger: InternalLogger; +} + +/** + * Builds 
expand validation configs from internal expand configurations. + * These are used to validate expanded navigation properties. + */ +function buildExpandValidationConfigs( + configs: ExpandConfig[], +): ExpandValidationConfig[] { + return configs.map((config) => { + // Get target table/occurrence from config (stored during expand call) + const targetTable = config.targetTable; + + // Extract schema from target table/occurrence + let targetSchema: Record | undefined; + if (targetTable) { + const tableSchema = (targetTable as any)[FMTableClass.Symbol.Schema]; + if (tableSchema) { + const zodSchema = tableSchema["~standard"]?.schema; + if ( + zodSchema && + typeof zodSchema === "object" && + "shape" in zodSchema + ) { + targetSchema = zodSchema.shape as Record; + } + } + } + + // Extract selected fields from options + const selectedFields = config.options?.select + ? Array.isArray(config.options.select) + ? config.options.select.map((f) => String(f)) + : [String(config.options.select)] + : undefined; + + return { + relation: config.relation, + targetSchema: targetSchema, + targetTable: targetTable, + table: targetTable, // For transformation + selectedFields: selectedFields, + nestedExpands: undefined, // TODO: Handle nested expands if needed + }; + }); +} + +/** + * Extracts records from response data without validation. + * Handles both single and list responses. + */ +function extractRecords( + data: any, + singleMode: "exact" | "maybe" | false, +): Result { + const resp = data as any; + if (singleMode !== false) { + const records = resp.value ?? [resp]; + const count = Array.isArray(records) ? records.length : 1; + + if (count > 1) { + return { + data: undefined, + error: new RecordCountMismatchError( + singleMode === "exact" ? 
"one" : "at-most-one", + count, + ), + }; + } + + if (count === 0) { + if (singleMode === "exact") { + return { + data: undefined, + error: new RecordCountMismatchError("one", 0), + }; + } + return { data: null as any, error: undefined }; + } + + const record = Array.isArray(records) ? records[0] : records; + return { data: record as any, error: undefined }; + } else { + // Handle list response structure + const records = resp.value ?? []; + return { data: records as any, error: undefined }; + } +} + +/** + * Renames fields in response data according to the field mapping. + * Used when select() is called with renamed fields (e.g., { userEmail: users.email }). + */ +function renameFieldsInResponse( + data: any, + fieldMapping: Record, +): any { + if (!data || typeof data !== "object") { + return data; + } + + // Handle array responses + if (Array.isArray(data)) { + return data.map((item) => renameFieldsInResponse(item, fieldMapping)); + } + + // Handle OData list response structure + if ("value" in data && Array.isArray(data.value)) { + return { + ...data, + value: data.value.map((item: any) => + renameFieldsInResponse(item, fieldMapping), + ), + }; + } + + // Handle single record + const renamed: Record = {}; + for (const [key, value] of Object.entries(data)) { + // Check if this field should be renamed + const outputKey = fieldMapping[key]; + if (outputKey) { + renamed[outputKey] = value; + } else { + renamed[key] = value; + } + } + return renamed; +} + +/** + * Processes a query response by transforming field IDs and validating the data. + * This function consolidates the response processing logic that was duplicated + * across multiple navigation branches in QueryBuilder.execute(). 
+ */ +export async function processQueryResponse( + response: any, + config: ProcessQueryResponseConfig, +): Promise> { + const { occurrence, singleMode, skipValidation, useEntityIds, fieldMapping } = + config; + + // Transform response if needed + let data = response; + if (occurrence && useEntityIds) { + const expandValidationConfigs = buildExpandValidationConfigs( + config.expandConfigs, + ); + data = transformResponseFields( + response, + occurrence, + expandValidationConfigs, + ); + } + + // Skip validation path + if (skipValidation) { + const result = extractRecords(data, singleMode); + // Rename fields AFTER extraction (but before returning) + if (result.data && fieldMapping && Object.keys(fieldMapping).length > 0) { + return { + ...result, + data: renameFieldsInResponse(result.data, fieldMapping), + }; + } + return result; + } + + // Validation path + // Get schema from occurrence if available + let schema: Record | undefined; + if (occurrence) { + const tableSchema = (occurrence as any)[FMTableClass.Symbol.Schema]; + if (tableSchema) { + const zodSchema = tableSchema["~standard"]?.schema; + if (zodSchema && typeof zodSchema === "object" && "shape" in zodSchema) { + schema = zodSchema.shape as Record; + } + } + } + + const selectedFields = config.queryOptions.select + ? ((Array.isArray(config.queryOptions.select) + ? config.queryOptions.select.map((f) => String(f)) + : [String(config.queryOptions.select)]) as (keyof T)[]) + : undefined; + const expandValidationConfigs = buildExpandValidationConfigs( + config.expandConfigs, + ); + + // Validate with original field names + const validationResult = + singleMode !== false + ? 
await validateSingleResponse( + data, + schema, + selectedFields as string[] | undefined, + expandValidationConfigs, + singleMode, + ) + : await validateListResponse( + data, + schema, + selectedFields as string[] | undefined, + expandValidationConfigs, + ); + + if (!validationResult.valid) { + return { data: undefined, error: validationResult.error }; + } + + // Rename fields AFTER validation completes + if (fieldMapping && Object.keys(fieldMapping).length > 0) { + return { + data: renameFieldsInResponse(validationResult.data, fieldMapping), + error: undefined, + }; + } + + return { data: validationResult.data as any, error: undefined }; +} diff --git a/packages/fmodata/src/client/query/types.ts b/packages/fmodata/src/client/query/types.ts new file mode 100644 index 00000000..a3b81441 --- /dev/null +++ b/packages/fmodata/src/client/query/types.ts @@ -0,0 +1,99 @@ +import type { Column } from "../../orm/column"; + +/** + * Type-safe orderBy type that provides better DX than odata-query's default. + * + * Supported forms: + * - `keyof T` - single field name (defaults to ascending) + * - `[keyof T, 'asc' | 'desc']` - single field with explicit direction + * - `Array<[keyof T, 'asc' | 'desc']>` - multiple fields with directions + * + * This type intentionally EXCLUDES `Array` to avoid ambiguity + * between [field1, field2] and [field, direction]. + */ +export type TypeSafeOrderBy = + | (keyof T & string) // Single field name + | [keyof T & string, "asc" | "desc"] // Single field with direction + | Array<[keyof T & string, "asc" | "desc"]>; // Multiple fields with directions + +// Internal type for expand configuration +export type ExpandConfig = { + relation: string; + options?: Partial>; + targetTable?: import("../../orm/table").FMTable; +}; + +// Type to represent expanded relations +export type ExpandedRelations = Record< + string, + { schema: any; selected: any; nested?: ExpandedRelations } +>; + +/** + * Extract the value type from a Column. 
+ * This uses the phantom type stored in Column to get the actual value type (output type for reading). + */ +type ExtractColumnType = + C extends Column ? T : never; + +/** + * Map a select object to its return type. + * For each key in the select object, extract the type from the corresponding Column. + */ +type MapSelectToReturnType< + TSelect extends Record>, + TSchema extends Record, +> = { + [K in keyof TSelect]: ExtractColumnType; +}; + +/** + * Helper: Resolve a single expand's return type, including nested expands + */ +export type ResolveExpandType< + Exp extends { schema: any; selected: any; nested?: ExpandedRelations }, +> = // Handle the selected fields +(Exp["selected"] extends Record> + ? MapSelectToReturnType + : Exp["selected"] extends keyof Exp["schema"] + ? Pick + : Exp["schema"]) & + // Recursively handle nested expands + (Exp["nested"] extends ExpandedRelations + ? ResolveExpandedRelations + : {}); + +/** + * Helper: Resolve all expanded relations recursively + */ +export type ResolveExpandedRelations = { + [K in keyof Exps]: ResolveExpandType[]; +}; + +export type QueryReturnType< + T extends Record, + Selected extends keyof T | Record>, + SingleMode extends "exact" | "maybe" | false, + IsCount extends boolean, + Expands extends ExpandedRelations, +> = IsCount extends true + ? number + : // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions + [Selected] extends [Record>] + ? SingleMode extends "exact" + ? MapSelectToReturnType & ResolveExpandedRelations + : SingleMode extends "maybe" + ? + | (MapSelectToReturnType & + ResolveExpandedRelations) + | null + : (MapSelectToReturnType & + ResolveExpandedRelations)[] + : // Use tuple wrapping to prevent distribution over union of keys + [Selected] extends [keyof T] + ? SingleMode extends "exact" + ? Pick & ResolveExpandedRelations + : SingleMode extends "maybe" + ? 
(Pick & ResolveExpandedRelations) | null + : (Pick & ResolveExpandedRelations)[] + : never; diff --git a/packages/fmodata/src/client/query/url-builder.ts b/packages/fmodata/src/client/query/url-builder.ts new file mode 100644 index 00000000..f9b466d4 --- /dev/null +++ b/packages/fmodata/src/client/query/url-builder.ts @@ -0,0 +1,179 @@ +import type { FMTable } from "../../orm/table"; +import { getTableName } from "../../orm/table"; +import { resolveTableId } from "../builders/table-utils"; +import type { ExecutionContext } from "../../types"; + +/** + * Configuration for navigation from RecordBuilder or EntitySet + */ +export interface NavigationConfig { + recordId?: string | number; + relation: string; + sourceTableName: string; + baseRelation?: string; // For chained navigations from navigated EntitySets + basePath?: string; // Full base path for chained entity set navigations +} + +/** + * Builds OData query URLs for different navigation modes. + * Handles: + * - Record navigation: /database/sourceTable('recordId')/relation + * - Entity set navigation: /database/sourceTable/relation + * - Count endpoint: /database/tableId/$count + * - Standard queries: /database/tableId + */ +export class QueryUrlBuilder { + constructor( + private databaseName: string, + private occurrence: FMTable, + private context: ExecutionContext, + ) {} + + /** + * Builds the full URL for a query request. 
+ * + * @param queryString - The OData query string (e.g., "?$filter=...&$select=...") + * @param options - Options including whether this is a count query, useEntityIds override, and navigation config + */ + build( + queryString: string, + options: { + isCount?: boolean; + useEntityIds?: boolean; + navigation?: NavigationConfig; + }, + ): string { + const tableId = resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + options.useEntityIds, + ); + + const navigation = options.navigation; + if (navigation?.recordId && navigation?.relation) { + return this.buildRecordNavigation(queryString, tableId, navigation); + } + if (navigation?.relation) { + return this.buildEntitySetNavigation(queryString, tableId, navigation); + } + if (options.isCount) { + return `/${this.databaseName}/${tableId}/$count${queryString}`; + } + return `/${this.databaseName}/${tableId}${queryString}`; + } + + /** + * Builds URL for record navigation: /database/sourceTable('recordId')/relation + * or /database/sourceTable/baseRelation('recordId')/relation for chained navigations + */ + private buildRecordNavigation( + queryString: string, + tableId: string, + navigation: NavigationConfig, + ): string { + const { sourceTableName, baseRelation, recordId, relation } = navigation; + const base = baseRelation + ? 
`${sourceTableName}/${baseRelation}('${recordId}')` + : `${sourceTableName}('${recordId}')`; + return `/${this.databaseName}/${base}/${relation}${queryString}`; + } + + /** + * Builds URL for entity set navigation: /database/sourceTable/relation + * or /database/basePath/relation for chained navigations + */ + private buildEntitySetNavigation( + queryString: string, + tableId: string, + navigation: NavigationConfig, + ): string { + const { sourceTableName, basePath, relation } = navigation; + const base = basePath || sourceTableName; + return `/${this.databaseName}/${base}/${relation}${queryString}`; + } + + /** + * Builds a query string path (without database prefix) for getQueryString(). + * Used when the full URL is not needed. + */ + buildPath( + queryString: string, + options?: { useEntityIds?: boolean; navigation?: NavigationConfig }, + ): string { + const useEntityIds = options?.useEntityIds; + const navigation = options?.navigation; + const tableId = resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + useEntityIds, + ); + + if (navigation?.recordId && navigation?.relation) { + const { sourceTableName, baseRelation, recordId, relation } = navigation; + const base = baseRelation + ? `${sourceTableName}/${baseRelation}('${recordId}')` + : `${sourceTableName}('${recordId}')`; + return queryString + ? `/${base}/${relation}${queryString}` + : `/${base}/${relation}`; + } + if (navigation?.relation) { + const { sourceTableName, basePath, relation } = navigation; + const base = basePath || sourceTableName; + return queryString + ? `/${base}/${relation}${queryString}` + : `/${base}/${relation}`; + } + return queryString ? `/${tableId}${queryString}` : `/${tableId}`; + } + + /** + * Build URL for record operations (single record by ID). + * Used by RecordBuilder to build URLs like /database/table('id'). 
+ * + * @param recordId - The record ID + * @param queryString - The OData query string (e.g., "?$select=...") + * @param options - Options including operation type and useEntityIds override + */ + buildRecordUrl( + recordId: string | number, + queryString: string, + options?: { + operation?: "getSingleField"; + operationParam?: string; + useEntityIds?: boolean; + isNavigateFromEntitySet?: boolean; + navigateSourceTableName?: string; + navigateRelation?: string; + }, + ): string { + const tableId = resolveTableId( + this.occurrence, + getTableName(this.occurrence), + this.context, + options?.useEntityIds, + ); + + // Build the base URL depending on whether this came from a navigated EntitySet + let url: string; + if ( + options?.isNavigateFromEntitySet && + options.navigateSourceTableName && + options.navigateRelation + ) { + // From navigated EntitySet: /sourceTable/relation('recordId') + url = `/${this.databaseName}/${options.navigateSourceTableName}/${options.navigateRelation}('${recordId}')`; + } else { + // Normal record: /tableName('recordId') - use FMTID if configured + url = `/${this.databaseName}/${tableId}('${recordId}')`; + } + + if (options?.operation === "getSingleField" && options.operationParam) { + url += `/${options.operationParam}`; + } + + return url + queryString; + } +} diff --git a/packages/fmodata/src/client/record-builder.ts b/packages/fmodata/src/client/record-builder.ts index 3dd872b4..48f66b8f 100644 --- a/packages/fmodata/src/client/record-builder.ts +++ b/packages/fmodata/src/client/record-builder.ts @@ -2,164 +2,137 @@ import type { ExecutionContext, ExecutableBuilder, Result, - ODataRecordMetadata, ODataFieldResponse, - InferSchemaType, ExecuteOptions, - WithSystemFields, ConditionallyWithODataAnnotations, + ExecuteMethodOptions, } from "../types"; -import { getAcceptHeader } from "../types"; -import type { TableOccurrence } from "./table-occurrence"; -import type { BaseTable } from "./base-table"; -import { - transformTableName, - 
transformResponseFields, - getTableIdentifiers, - transformFieldNamesArray, -} from "../transform"; +import type { + FMTable, + InferSchemaOutputFromFMTable, + ValidExpandTarget, + ExtractTableName, + ValidateNoContainerFields, +} from "../orm/table"; +import { getTableName, getNavigationPaths } from "../orm/table"; import { safeJsonParse } from "./sanitize-json"; import { parseErrorResponse } from "./error-parser"; import { QueryBuilder } from "./query-builder"; -import { - validateSingleResponse, - type ExpandValidationConfig, -} from "../validation"; import { type FFetchOptions } from "@fetchkit/ffetch"; -import { StandardSchemaV1 } from "@standard-schema/spec"; -import { QueryOptions } from "odata-query"; -import buildQuery from "odata-query"; - -// Helper type to extract schema from a TableOccurrence -type ExtractSchemaFromOccurrence = - O extends TableOccurrence - ? BT extends BaseTable - ? S - : never - : never; - -// Helper type to extract navigation relation names from an occurrence -type ExtractNavigationNames< - O extends TableOccurrence | undefined, -> = - O extends TableOccurrence - ? Nav extends Record - ? keyof Nav & string - : never - : never; - -// Helper type to find target occurrence by relation name -type FindNavigationTarget< - O extends TableOccurrence | undefined, - Name extends string, -> = - O extends TableOccurrence - ? Nav extends Record - ? Name extends keyof Nav - ? Nav[Name] - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - > - : TableOccurrence< - BaseTable, any, any, any>, - any, - any, - any - >; - -// Helper type to get the inferred schema type from a target occurrence -type GetTargetSchemaType< - O extends TableOccurrence | undefined, - Rel extends string, -> = [FindNavigationTarget] extends [ - TableOccurrence, -] - ? [BT] extends [BaseTable] - ? [S] extends [Record] - ? 
InferSchemaType - : Record - : Record - : Record; - -// Internal type for expand configuration -type ExpandConfig = { - relation: string; - options?: Partial>; +import { isColumn, type Column } from "../orm/column"; +import { + type ExpandConfig, + type ExpandedRelations, + ExpandBuilder, + resolveTableId, + mergeExecuteOptions, + processODataResponse, + getSchemaFromTable, + processSelectWithRenames, + buildSelectExpandQueryString, + createODataRequest, +} from "./builders/index"; +import { + type ResolveExpandedRelations, + type ResolveExpandType, +} from "./query/types"; +import { createLogger, InternalLogger, Logger } from "../logger"; + +/** + * Extract the value type from a Column. + * This uses the phantom type stored in Column to get the actual value type. + */ +type ExtractColumnType = C extends Column ? T : never; + +/** + * Map a select object to its return type. + * For each key in the select object, extract the type from the corresponding Column. + */ +type MapSelectToReturnType< + TSelect extends Record>, + TSchema extends Record, +> = { + [K in keyof TSelect]: ExtractColumnType; }; -// Type to represent expanded relations -export type ExpandedRelations = Record; - // Return type for RecordBuilder execute export type RecordReturnType< - T extends Record, + Schema extends Record, IsSingleField extends boolean, - FieldKey extends keyof T, - Selected extends keyof T, + FieldColumn extends Column | undefined, + Selected extends + | keyof Schema + | Record>>>, Expands extends ExpandedRelations, > = IsSingleField extends true - ? T[FieldKey] - : Pick & { - [K in keyof Expands]: Pick< - Expands[K]["schema"], - Expands[K]["selected"] - >[]; - }; + ? FieldColumn extends Column + ? TOutput + : never + : // Use tuple wrapping [Selected] extends [...] to prevent distribution over unions + [Selected] extends [Record>] + ? 
MapSelectToReturnType & + ResolveExpandedRelations + : // Use tuple wrapping to prevent distribution over union of keys + [Selected] extends [keyof Schema] + ? Pick & ResolveExpandedRelations + : never; export class RecordBuilder< - T extends Record, + Occ extends FMTable = FMTable, IsSingleField extends boolean = false, - FieldKey extends keyof T = keyof T, - Occ extends TableOccurrence | undefined = - | TableOccurrence - | undefined, - Selected extends keyof T = keyof T, + FieldColumn extends Column | undefined = undefined, + Selected extends + | keyof InferSchemaOutputFromFMTable> + | Record< + string, + Column>> + > = keyof InferSchemaOutputFromFMTable>, Expands extends ExpandedRelations = {}, > implements ExecutableBuilder< - RecordReturnType + RecordReturnType< + InferSchemaOutputFromFMTable>, + IsSingleField, + FieldColumn, + Selected, + Expands + > > { - private occurrence?: Occ; - private tableName: string; + private table: Occ; private databaseName: string; private context: ExecutionContext; private recordId: string | number; private operation?: "getSingleField" | "navigate"; private operationParam?: string; + private operationColumn?: Column; private isNavigateFromEntitySet?: boolean; private navigateRelation?: string; private navigateSourceTableName?: string; private databaseUseEntityIds: boolean; - // New properties for select/expand support + // Properties for select/expand support private selectedFields?: string[]; private expandConfigs: ExpandConfig[] = []; + // Mapping from field names to output keys (for renamed fields in select) + private fieldMapping?: Record; + + private logger: InternalLogger; constructor(config: { - occurrence?: Occ; - tableName: string; + occurrence: Occ; databaseName: string; context: ExecutionContext; recordId: string | number; databaseUseEntityIds?: boolean; }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; + this.table = config.occurrence; this.databaseName = config.databaseName; 
this.context = config.context; this.recordId = config.recordId; this.databaseUseEntityIds = config.databaseUseEntityIds ?? false; + this.logger = config.context?._getLogger?.() ?? createLogger(); } /** @@ -168,11 +141,7 @@ export class RecordBuilder< private mergeExecuteOptions( options?: RequestInit & FFetchOptions & ExecuteOptions, ): RequestInit & FFetchOptions & { useEntityIds?: boolean } { - // If useEntityIds is not set in options, use the database-level setting - return { - ...options, - useEntityIds: options?.useEntityIds ?? this.databaseUseEntityIds, - }; + return mergeExecuteOptions(options, this.databaseUseEntityIds); } /** @@ -180,70 +149,93 @@ export class RecordBuilder< * @param useEntityIds - Optional override for entity ID usage */ private getTableId(useEntityIds?: boolean): string { - if (!this.occurrence) { - return this.tableName; + if (!this.table) { + throw new Error("Table occurrence is required"); } - - const contextDefault = this.context._getUseEntityIds?.() ?? false; - const shouldUseIds = useEntityIds ?? contextDefault; - - if (shouldUseIds) { - const identifiers = getTableIdentifiers(this.occurrence); - if (!identifiers.id) { - throw new Error( - `useEntityIds is true but TableOccurrence "${identifiers.name}" does not have an fmtId defined`, - ); - } - return identifiers.id; - } - - return this.occurrence.getTableName(); + return resolveTableId( + this.table, + getTableName(this.table), + this.context, + useEntityIds, + ); } - getSingleField( - field: K, - ): RecordBuilder { - const newBuilder = new RecordBuilder({ - occurrence: this.occurrence, - tableName: this.tableName, + /** + * Creates a new RecordBuilder with modified configuration. + * Used by select() to create new instances. 
+ */ + private cloneWithChanges< + NewSelected extends + | keyof InferSchemaOutputFromFMTable> + | Record< + string, + Column>> + > = Selected, + >(changes: { + selectedFields?: string[]; + fieldMapping?: Record; + }): RecordBuilder { + const newBuilder = new RecordBuilder< + Occ, + false, + FieldColumn, + NewSelected, + Expands + >({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, recordId: this.recordId, databaseUseEntityIds: this.databaseUseEntityIds, }); - newBuilder.operation = "getSingleField"; - newBuilder.operationParam = field.toString(); + newBuilder.selectedFields = changes.selectedFields ?? this.selectedFields; + newBuilder.fieldMapping = changes.fieldMapping ?? this.fieldMapping; + newBuilder.expandConfigs = [...this.expandConfigs]; // Preserve navigation context newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; newBuilder.navigateRelation = this.navigateRelation; newBuilder.navigateSourceTableName = this.navigateSourceTableName; + newBuilder.operationColumn = this.operationColumn; return newBuilder; } - /** - * Select specific fields to retrieve from the record. - * Only the selected fields will be returned in the response. 
- * - * @example - * ```typescript - * const contact = await db.from("contacts").get("uuid").select("name", "email").execute(); - * // contact.data has type { name: string; email: string } - * ``` - */ - select( - ...fields: K[] - ): RecordBuilder { - const uniqueFields = [...new Set(fields)]; - const newBuilder = new RecordBuilder({ - occurrence: this.occurrence, - tableName: this.tableName, + getSingleField< + TColumn extends Column>, any>, + >( + column: TColumn, + ): RecordBuilder< + Occ, + true, + TColumn, + keyof InferSchemaOutputFromFMTable>, + {} + > { + // Runtime validation: ensure column is from the correct table + const tableName = getTableName(this.table); + if (!column.isFromTable(tableName)) { + throw new Error( + `Column ${column.toString()} is not from table ${tableName}`, + ); + } + + const newBuilder = new RecordBuilder< + Occ, + true, + TColumn, + keyof InferSchemaOutputFromFMTable>, + {} + >({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, recordId: this.recordId, databaseUseEntityIds: this.databaseUseEntityIds, }); - newBuilder.selectedFields = uniqueFields.map((f) => String(f)); - newBuilder.expandConfigs = [...this.expandConfigs]; + newBuilder.operation = "getSingleField"; + newBuilder.operationColumn = column; + newBuilder.operationParam = column.getFieldIdentifier( + this.databaseUseEntityIds, + ); // Preserve navigation context newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; newBuilder.navigateRelation = this.navigateRelation; @@ -251,93 +243,97 @@ export class RecordBuilder< return newBuilder; } + /** + * Select fields using column references. + * Allows renaming fields by using different keys in the object. + * Container fields cannot be selected and will cause a type error. + * + * @example + * db.from(contacts).get("uuid").select({ + * name: contacts.name, + * userEmail: contacts.email // renamed! 
+ * }) + * + * @param fields - Object mapping output keys to column references (container fields excluded) + * @returns RecordBuilder with updated selected fields + */ + select< + TSelect extends Record< + string, + Column, false> + >, + >(fields: TSelect): RecordBuilder { + const tableName = getTableName(this.table); + const { selectedFields, fieldMapping } = processSelectWithRenames( + fields, + tableName, + this.logger, + ); + + return this.cloneWithChanges({ + selectedFields, + fieldMapping: + Object.keys(fieldMapping).length > 0 ? fieldMapping : undefined, + }) as any; + } + /** * Expand a navigation property to include related records. * Supports nested select, filter, orderBy, and expand operations. * * @example * ```typescript - * // Simple expand - * const contact = await db.from("contacts").get("uuid").expand("users").execute(); + * // Simple expand with FMTable object + * const contact = await db.from(contacts).get("uuid").expand(users).execute(); * * // Expand with select - * const contact = await db.from("contacts").get("uuid") - * .expand("users", b => b.select("username", "email")) + * const contact = await db.from(contacts).get("uuid") + * .expand(users, b => b.select({ username: users.username, email: users.email })) * .execute(); * ``` */ expand< - Rel extends ExtractNavigationNames | (string & {}), - TargetOcc extends FindNavigationTarget = FindNavigationTarget< - Occ, - Rel - >, - TargetSchema extends GetTargetSchemaType = GetTargetSchemaType< - Occ, - Rel - >, - TargetSelected extends keyof TargetSchema = keyof TargetSchema, + TargetTable extends FMTable, + TSelected extends + | keyof InferSchemaOutputFromFMTable + | Record< + string, + Column> + > = keyof InferSchemaOutputFromFMTable, + TNestedExpands extends ExpandedRelations = {}, >( - relation: Rel, + targetTable: ValidExpandTarget, callback?: ( builder: QueryBuilder< - TargetSchema, - keyof TargetSchema, + TargetTable, + keyof InferSchemaOutputFromFMTable, false, false, - TargetOcc extends 
TableOccurrence - ? TargetOcc - : undefined + {} >, - ) => QueryBuilder< - WithSystemFields, - TargetSelected, - any, - any, - any - >, + ) => QueryBuilder, ): RecordBuilder< - T, - false, - FieldKey, Occ, + false, + FieldColumn, Selected, Expands & { - [K in Rel]: { schema: TargetSchema; selected: TargetSelected }; + [K in ExtractTableName]: { + schema: InferSchemaOutputFromFMTable; + selected: TSelected; + nested: TNestedExpands; + }; } > { - // Look up target occurrence from navigation - const targetOccurrence = this.occurrence?.navigation[relation as string]; - - // Helper function to get defaultSelect fields from target occurrence - const getDefaultSelectFields = (): string[] | undefined => { - if (!targetOccurrence) return undefined; - const defaultSelect = targetOccurrence.defaultSelect; - if (defaultSelect === "schema") { - const schema = targetOccurrence.baseTable?.schema; - if (schema) { - return [...new Set(Object.keys(schema))]; - } - } else if (Array.isArray(defaultSelect)) { - return [...new Set(defaultSelect)]; - } - // If "all", return undefined (no select restriction) - return undefined; - }; - // Create new builder with updated types const newBuilder = new RecordBuilder< - T, - false, - FieldKey, Occ, + false, + FieldColumn, Selected, - Expands & { - [K in Rel]: { schema: TargetSchema; selected: TargetSelected }; - } + any >({ - occurrence: this.occurrence, - tableName: this.tableName, + occurrence: this.table, databaseName: this.databaseName, context: this.context, recordId: this.recordId, @@ -346,319 +342,138 @@ export class RecordBuilder< // Copy existing state newBuilder.selectedFields = this.selectedFields; + newBuilder.fieldMapping = this.fieldMapping; newBuilder.expandConfigs = [...this.expandConfigs]; newBuilder.isNavigateFromEntitySet = this.isNavigateFromEntitySet; newBuilder.navigateRelation = this.navigateRelation; newBuilder.navigateSourceTableName = this.navigateSourceTableName; + newBuilder.operationColumn = this.operationColumn; - 
if (callback) { - // Create a new QueryBuilder for the target occurrence - const targetBuilder = new QueryBuilder({ - occurrence: targetOccurrence, - tableName: targetOccurrence?.name ?? (relation as string), - databaseName: this.databaseName, - context: this.context, - databaseUseEntityIds: this.databaseUseEntityIds, - }); - - // Cast to the expected type for the callback - // At runtime, the builder is untyped (any), but at compile-time we enforce proper types - const typedBuilder = targetBuilder as QueryBuilder< - TargetSchema, - keyof TargetSchema, - false, - false, - TargetOcc extends TableOccurrence - ? TargetOcc - : undefined - >; - - // Pass to callback and get configured builder - const configuredBuilder = callback(typedBuilder); - - // Extract the builder's query options - const expandOptions: Partial> = { - ...(configuredBuilder as any).queryOptions, - }; + // Use ExpandBuilder.processExpand to handle the expand logic + const expandBuilder = new ExpandBuilder( + this.databaseUseEntityIds, + this.logger, + ); + type TargetBuilder = QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false, + {} + >; + const expandConfig = expandBuilder.processExpand< + TargetTable, + TargetBuilder + >( + targetTable, + this.table ?? 
undefined, + callback as ((builder: TargetBuilder) => TargetBuilder) | undefined, + () => + new QueryBuilder({ + occurrence: targetTable, + databaseName: this.databaseName, + context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, + }), + ); - // If callback didn't provide select, apply defaultSelect from target occurrence - if (!expandOptions.select) { - const defaultFields = getDefaultSelectFields(); - if (defaultFields) { - expandOptions.select = defaultFields; - } - } + newBuilder.expandConfigs.push(expandConfig); + return newBuilder as any; + } - // If the configured builder has nested expands, we need to include them - if ((configuredBuilder as any).expandConfigs?.length > 0) { - // Build nested expand string from the configured builder's expand configs - const nestedExpandString = this.buildExpandString( - (configuredBuilder as any).expandConfigs, + navigate>( + targetTable: ValidExpandTarget, + ): QueryBuilder< + TargetTable, + keyof InferSchemaOutputFromFMTable, + false, + false + > { + // Extract name and validate + const relationName = getTableName(targetTable); + + // Runtime validation: Check if relation name is in navigationPaths + if (this.table) { + const navigationPaths = getNavigationPaths(this.table); + if (navigationPaths && !navigationPaths.includes(relationName)) { + this.logger.warn( + `Cannot navigate to "${relationName}". Valid navigation paths: ${navigationPaths.length > 0 ? 
navigationPaths.join(", ") : "none"}`, ); - if (nestedExpandString) { - // Add nested expand to options - expandOptions.expand = nestedExpandString as any; - } - } - - const expandConfig: ExpandConfig = { - relation: relation as string, - options: expandOptions, - }; - - newBuilder.expandConfigs.push(expandConfig); - } else { - // Simple expand without callback - apply defaultSelect if available - const defaultFields = getDefaultSelectFields(); - if (defaultFields) { - newBuilder.expandConfigs.push({ - relation: relation as string, - options: { select: defaultFields }, - }); - } else { - newBuilder.expandConfigs.push({ relation: relation as string }); } } - return newBuilder; - } - - // Overload for valid relation names - returns typed QueryBuilder - navigate>( - relationName: RelationName, - ): QueryBuilder< - ExtractSchemaFromOccurrence< - FindNavigationTarget - > extends Record - ? InferSchemaType< - ExtractSchemaFromOccurrence> - > - : Record - >; - // Overload for arbitrary strings - returns generic QueryBuilder with system fields - navigate( - relationName: string, - ): QueryBuilder<{ ROWID: number; ROWMODID: number; [key: string]: any }>; - // Implementation - navigate(relationName: string): QueryBuilder { - // Use the target occurrence if available, otherwise allow untyped navigation - // (useful when types might be incomplete) - const targetOccurrence = this.occurrence?.navigation[relationName]; - const builder = new QueryBuilder({ - occurrence: targetOccurrence, - tableName: targetOccurrence?.name ?? relationName, + // Create QueryBuilder with target table + const builder = new QueryBuilder({ + occurrence: targetTable, databaseName: this.databaseName, context: this.context, + databaseUseEntityIds: this.databaseUseEntityIds, }); - // Store the navigation info - we'll use it in execute - // Transform relation name to FMTID if using entity IDs - const relationId = targetOccurrence - ? 
transformTableName(targetOccurrence) - : relationName; - (builder as any).isNavigate = true; - (builder as any).navigateRecordId = this.recordId; - (builder as any).navigateRelation = relationId; + // Store the navigation info - we'll use it in execute + // Use relation name as-is (entity ID handling is done in QueryBuilder) + const relationId = relationName; // If this RecordBuilder came from a navigated EntitySet, we need to preserve that base path + let sourceTableName: string; + let baseRelation: string | undefined; if ( this.isNavigateFromEntitySet && this.navigateSourceTableName && this.navigateRelation ) { // Build the base path: /sourceTable/relation('recordId')/newRelation - (builder as any).navigateSourceTableName = this.navigateSourceTableName; - (builder as any).navigateBaseRelation = this.navigateRelation; + sourceTableName = this.navigateSourceTableName; + baseRelation = this.navigateRelation; } else { // Normal record navigation: /tableName('recordId')/relation - // Transform source table name to FMTID if using entity IDs - const sourceTableId = this.occurrence - ? transformTableName(this.occurrence) - : this.tableName; - (builder as any).navigateSourceTableName = sourceTableId; + // Use table ID if available, otherwise table name + if (!this.table) { + throw new Error("Table occurrence is required for navigation"); + } + sourceTableName = resolveTableId( + this.table, + getTableName(this.table), + this.context, + this.databaseUseEntityIds, + ); } - return builder; - } - - /** - * Formats select fields for use in query strings. 
- * - Transforms field names to FMFIDs if using entity IDs - * - Wraps "id" fields in double quotes - * - URL-encodes special characters but preserves spaces - */ - private formatSelectFields( - select: string[] | undefined, - baseTable?: BaseTable, - useEntityIds?: boolean, - ): string { - if (!select || select.length === 0) return ""; - - // Transform to field IDs if using entity IDs AND the feature is enabled - const shouldTransform = - baseTable && (useEntityIds ?? this.databaseUseEntityIds); - const transformedFields = shouldTransform - ? transformFieldNamesArray(select, baseTable) - : select; - - return transformedFields - .map((field) => { - if (field === "id") return `"id"`; - const encodedField = encodeURIComponent(String(field)); - return encodedField.replace(/%20/g, " "); - }) - .join(","); - } - - /** - * Builds expand validation configs from internal expand configurations. - * These are used to validate expanded navigation properties. - */ - private buildExpandValidationConfigs( - configs: ExpandConfig[], - ): ExpandValidationConfig[] { - return configs.map((config) => { - // Look up target occurrence from navigation - const targetOccurrence = this.occurrence?.navigation[config.relation]; - const targetSchema = targetOccurrence?.baseTable?.schema; - - // Extract selected fields from options - const selectedFields = config.options?.select - ? Array.isArray(config.options.select) - ? config.options.select.map((f) => String(f)) - : [String(config.options.select)] - : undefined; - - return { - relation: config.relation, - targetSchema: targetSchema, - targetOccurrence: targetOccurrence, - targetBaseTable: targetOccurrence?.baseTable, - occurrence: targetOccurrence, // For transformation - selectedFields: selectedFields, - nestedExpands: undefined, // TODO: Handle nested expands if needed - }; - }); - } - - /** - * Builds OData expand query string from expand configurations. - * Handles nested expands recursively. 
- * Transforms relation names to FMTIDs if using entity IDs. - */ - private buildExpandString(configs: ExpandConfig[]): string { - if (configs.length === 0) { - return ""; - } + (builder as any).navigation = { + recordId: this.recordId, + relation: relationId, + sourceTableName, + baseRelation, + }; - return configs - .map((config) => { - // Get target occurrence for this relation - const targetOccurrence = this.occurrence?.navigation[config.relation]; - - // When using entity IDs, use the target table's FMTID in the expand parameter - // FileMaker expects FMTID in $expand when Prefer header is set - const relationName = - targetOccurrence && targetOccurrence.isUsingTableId?.() - ? targetOccurrence.getTableId() - : config.relation; - - if (!config.options || Object.keys(config.options).length === 0) { - // Simple expand without options - return relationName; - } - - // Build query options for this expand - const parts: string[] = []; - - if (config.options.select) { - // Pass target base table for field transformation - const selectFields = this.formatSelectFields( - Array.isArray(config.options.select) - ? 
config.options.select.map((f) => String(f)) - : [String(config.options.select)], - targetOccurrence?.baseTable, - ); - parts.push(`$select=${selectFields}`); - } - - if (config.options.filter) { - // Filter should already be transformed by the nested builder - // Use odata-query to build filter string - const filterQuery = buildQuery({ filter: config.options.filter }); - const filterMatch = filterQuery.match(/\$filter=([^&]+)/); - if (filterMatch) { - parts.push(`$filter=${filterMatch[1]}`); - } - } - - if (config.options.orderBy) { - const orderByQuery = buildQuery({ orderBy: config.options.orderBy }); - const orderByMatch = orderByQuery.match(/\$orderby=([^&]+)/); - if (orderByMatch) { - parts.push(`$orderby=${orderByMatch[1]}`); - } - } - - if (config.options.top !== undefined) { - parts.push(`$top=${config.options.top}`); - } - - if (config.options.skip !== undefined) { - parts.push(`$skip=${config.options.skip}`); - } - - // Handle nested expand - if (config.options.expand) { - // Nested expand is already a string from buildExpandString - parts.push(`$expand=${String(config.options.expand)}`); - } - - if (parts.length === 0) { - return relationName; - } - - return `${relationName}(${parts.join(";")})`; - }) - .join(","); + return builder; } /** * Builds the complete query string including $select and $expand parameters. 
*/ private buildQueryString(): string { - const parts: string[] = []; - - // Build $select - if (this.selectedFields && this.selectedFields.length > 0) { - const selectString = this.formatSelectFields( - this.selectedFields, - this.occurrence?.baseTable, - ); - if (selectString) { - parts.push(`$select=${selectString}`); - } - } - - // Build $expand - const expandString = this.buildExpandString(this.expandConfigs); - if (expandString) { - parts.push(`$expand=${expandString}`); - } - - if (parts.length === 0) { - return ""; - } - - return `?${parts.join("&")}`; + return buildSelectExpandQueryString({ + selectedFields: this.selectedFields, + expandConfigs: this.expandConfigs, + table: this.table, + useEntityIds: this.databaseUseEntityIds, + logger: this.logger, + }); } async execute( - options?: RequestInit & FFetchOptions & EO, + options?: ExecuteMethodOptions, ): Promise< Result< ConditionallyWithODataAnnotations< - RecordReturnType, + RecordReturnType< + InferSchemaOutputFromFMTable>, + IsSingleField, + FieldColumn, + Selected, + Expands + >, EO["includeODataAnnotations"] extends true ? true : false > > @@ -701,50 +516,30 @@ export class RecordBuilder< // Handle single field operation if (this.operation === "getSingleField") { // Single field returns a JSON object with @context and value - const fieldResponse = response as ODataFieldResponse; + // The type is extracted from the Column stored in FieldColumn generic + const fieldResponse = response as ODataFieldResponse; return { data: fieldResponse.value as any, error: undefined }; } - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = mergedOptions.useEntityIds ?? false; - - // Build expand validation configs for transformation and validation - const expandValidationConfigs = - this.expandConfigs.length > 0 - ? 
this.buildExpandValidationConfigs(this.expandConfigs) - : undefined; - - if (this.occurrence?.baseTable && shouldUseIds) { - response = transformResponseFields( - response, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - - // Validate the single record response - const validation = await validateSingleResponse( - response, - schema, - this.selectedFields as (keyof T)[] | undefined, - expandValidationConfigs, - "exact", // Expect exactly one record + // Use shared response processor + const expandBuilder = new ExpandBuilder( + mergedOptions.useEntityIds ?? false, + this.logger, + ); + const expandValidationConfigs = expandBuilder.buildValidationConfigs( + this.expandConfigs, ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - - // Handle null response - if (validation.data === null) { - return { data: null as any, error: undefined }; - } - - return { data: validation.data as any, error: undefined }; + return processODataResponse(response, { + table: this.table, + schema: getSchemaFromTable(this.table), + singleMode: "exact", + selectedFields: this.selectedFields, + expandValidationConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + }); } getRequestConfig(): { method: string; url: string; body?: any } { @@ -764,7 +559,13 @@ export class RecordBuilder< url = `/${this.databaseName}/${tableId}('${this.recordId}')`; } - if (this.operation === "getSingleField" && this.operationParam) { + if (this.operation === "getSingleField" && this.operationColumn) { + // Use the column's getFieldIdentifier to support entity IDs + url += `/${this.operationColumn.getFieldIdentifier( + this.databaseUseEntityIds, + )}`; + } else if (this.operation === "getSingleField" && this.operationParam) { + // Fallback for backwards compatibility (shouldn't happen 
in normal flow) url += `/${this.operationParam}`; } else { // Add query string for select/expand (only when not getting a single field) @@ -797,7 +598,12 @@ export class RecordBuilder< path = `/${tableId}('${this.recordId}')`; } - if (this.operation === "getSingleField" && this.operationParam) { + if (this.operation === "getSingleField" && this.operationColumn) { + return `${path}/${this.operationColumn.getFieldIdentifier( + this.databaseUseEntityIds, + )}`; + } else if (this.operation === "getSingleField" && this.operationParam) { + // Fallback for backwards compatibility (shouldn't happen in normal flow) return `${path}/${this.operationParam}`; } @@ -807,28 +613,29 @@ export class RecordBuilder< toRequest(baseUrl: string, options?: ExecuteOptions): Request { const config = this.getRequestConfig(); - const fullUrl = `${baseUrl}${config.url}`; - - return new Request(fullUrl, { - method: config.method, - headers: { - "Content-Type": "application/json", - Accept: getAcceptHeader(options?.includeODataAnnotations), - }, - }); + return createODataRequest(baseUrl, config, options); } async processResponse( response: Response, options?: ExecuteOptions, ): Promise< - Result> + Result< + RecordReturnType< + InferSchemaOutputFromFMTable>, + IsSingleField, + FieldColumn, + Selected, + Expands + > + > > { // Check for error responses (important for batch operations) if (!response.ok) { + const tableName = this.table ? 
getTableName(this.table) : "unknown"; const error = await parseErrorResponse( response, - response.url || `/${this.databaseName}/${this.tableName}`, + response.url || `/${this.databaseName}/${tableName}`, ); return { data: undefined, error }; } @@ -839,50 +646,33 @@ export class RecordBuilder< // Handle single field operation if (this.operation === "getSingleField") { // Single field returns a JSON object with @context and value - const fieldResponse = rawResponse as ODataFieldResponse; + // The type is extracted from the Column stored in FieldColumn generic + const fieldResponse = rawResponse as ODataFieldResponse; return { data: fieldResponse.value as any, error: undefined }; } - // Transform response field IDs back to names if using entity IDs - // Only transform if useEntityIds resolves to true (respects per-request override) - const shouldUseIds = options?.useEntityIds ?? this.databaseUseEntityIds; - - // Build expand validation configs for transformation and validation - const expandValidationConfigs = - this.expandConfigs.length > 0 - ? this.buildExpandValidationConfigs(this.expandConfigs) - : undefined; - - let transformedResponse = rawResponse; - if (this.occurrence?.baseTable && shouldUseIds) { - transformedResponse = transformResponseFields( - rawResponse, - this.occurrence.baseTable, - expandValidationConfigs, - ); - } - - // Get schema from occurrence if available - const schema = this.occurrence?.baseTable?.schema; - - // Validate the single record response - const validation = await validateSingleResponse( - transformedResponse, - schema, - this.selectedFields as (keyof T)[] | undefined, - expandValidationConfigs, - "exact", // Expect exactly one record + // Use shared response processor + const mergedOptions = mergeExecuteOptions( + options, + this.databaseUseEntityIds, + ); + const expandBuilder = new ExpandBuilder( + mergedOptions.useEntityIds ?? 
false, + this.logger, + ); + const expandValidationConfigs = expandBuilder.buildValidationConfigs( + this.expandConfigs, ); - if (!validation.valid) { - return { data: undefined, error: validation.error }; - } - - // Handle null response - if (validation.data === null) { - return { data: null as any, error: undefined }; - } - - return { data: validation.data as any, error: undefined }; + return processODataResponse(rawResponse, { + table: this.table, + schema: getSchemaFromTable(this.table), + singleMode: "exact", + selectedFields: this.selectedFields, + expandValidationConfigs, + skipValidation: options?.skipValidation, + useEntityIds: mergedOptions.useEntityIds, + fieldMapping: this.fieldMapping, + }); } } diff --git a/packages/fmodata/src/client/response-processor.ts b/packages/fmodata/src/client/response-processor.ts index 09e824b0..92b6ad42 100644 --- a/packages/fmodata/src/client/response-processor.ts +++ b/packages/fmodata/src/client/response-processor.ts @@ -1,5 +1,5 @@ import type { StandardSchemaV1 } from "@standard-schema/spec"; -import type { BaseTable } from "./base-table"; +import type { FMTable } from "../orm/table"; import type { ExecuteOptions } from "../types"; import type { ExpandValidationConfig } from "../validation"; import { ValidationError, ResponseStructureError } from "../errors"; @@ -23,16 +23,15 @@ export type ODataRecordResponse = ODataResponse< } >; - /** - * Transform field IDs back to names using the base table configuration + * Transform field IDs back to names using the table configuration */ export function applyFieldTransformation>( response: ODataResponse | ODataListResponse, - baseTable: BaseTable, any, any, any>, + table: FMTable, expandConfigs?: ExpandValidationConfig[], ): ODataResponse | ODataListResponse { - return transformResponseFields(response, baseTable, expandConfigs) as + return transformResponseFields(response, table, expandConfigs) as | ODataResponse | ODataListResponse; } diff --git 
a/packages/fmodata/src/client/table-occurrence.ts b/packages/fmodata/src/client/table-occurrence.ts deleted file mode 100644 index d7e41e93..00000000 --- a/packages/fmodata/src/client/table-occurrence.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { BaseTable } from "./base-table"; - -// Helper type to extract schema from BaseTable -type ExtractSchema = - BT extends BaseTable ? S : never; - -// Symbol for internal navigation setting (used by buildOccurrences) -const INTERNAL_NAV = Symbol("internal-navigation"); - -export class TableOccurrence< - BT extends BaseTable = any, - Name extends string = string, - Nav extends Record> = {}, - DefSelect extends - | "all" - | "schema" - | readonly (keyof ExtractSchema)[] = "schema", -> { - public readonly name: Name; - public readonly baseTable: BT; - public readonly navigation: Nav; - public readonly defaultSelect: DefSelect; - public readonly fmtId?: `FMTID:${string}`; - - constructor(config: { - readonly name: Name; - readonly baseTable: BT; - readonly defaultSelect?: DefSelect; - readonly fmtId?: `FMTID:${string}`; - /** @internal Used by buildOccurrences - do not use directly */ - readonly [INTERNAL_NAV]?: Nav; - }) { - this.name = config.name; - this.baseTable = config.baseTable; - this.navigation = (config[INTERNAL_NAV] ?? {}) as Nav; - this.defaultSelect = (config.defaultSelect ?? "schema") as DefSelect; - this.fmtId = config.fmtId; - } - - /** - * Returns the FileMaker table occurrence ID (FMTID) if available, or the table name. - * @returns The FMTID string or the table name - */ - getTableId(): string { - return this.fmtId ?? this.name; - } - - /** - * Returns the table occurrence name. - * @returns The table name - */ - getTableName(): string { - return this.name; - } - - /** - * Returns true if this TableOccurrence is using FileMaker table occurrence IDs. 
- */ - isUsingTableId(): boolean { - return this.fmtId !== undefined; - } - - /** - * @internal Creates a new TableOccurrence with navigation - used by buildOccurrences - */ - static _withNavigation< - BT extends BaseTable, - Name extends string, - Nav extends Record>, - DefSelect extends - | "all" - | "schema" - | readonly (keyof ExtractSchema)[] = "schema", - >( - base: TableOccurrence, - navigation: Nav, - ): TableOccurrence { - return new TableOccurrence({ - name: base.name, - baseTable: base.baseTable, - defaultSelect: base.defaultSelect, - fmtId: base.fmtId, - [INTERNAL_NAV]: navigation, - }) as TableOccurrence; - } -} - -// Helper function to create TableOccurrence with proper type inference -export function createTableOccurrence< - const Name extends string, - BT extends BaseTable, - DefSelect extends - | "all" - | "schema" - | readonly (keyof ExtractSchema)[] = "schema", ->(config: { - name: Name; - baseTable: BT; - defaultSelect?: DefSelect; - fmtId?: `FMTID:${string}`; -}): TableOccurrence { - return new TableOccurrence(config); -} - -/** - * Creates a TableOccurrence with proper TypeScript type inference. - * - * Use this function to create TableOccurrence instances with full type safety. - * For navigation between tables, use `buildOccurrences()` after defining your TOs. 
- * - * @example - * ```ts - * const users = defineTableOccurrence({ - * name: "users", - * baseTable: usersBase, - * }); - * ``` - * - * @example With entity IDs - * ```ts - * const products = defineTableOccurrence({ - * name: "products", - * baseTable: productsBase, - * fmtId: "FMTID:12345", - * }); - * ``` - * - * @example With navigation (use buildOccurrences) - * ```ts - * const _users = defineTableOccurrence({ name: "users", baseTable: usersBase }); - * const _contacts = defineTableOccurrence({ name: "contacts", baseTable: contactsBase }); - * - * const [users, contacts] = buildOccurrences({ - * occurrences: [_users, _contacts], - * navigation: { - * users: ["contacts"], - * contacts: ["users"], - * }, - * }); - * ``` - */ -export function defineTableOccurrence< - const Name extends string, - BT extends BaseTable, - const DefSelect extends - | "all" - | "schema" - | readonly (keyof ExtractSchema)[] = "schema", ->(config: { - readonly name: Name; - readonly baseTable: BT; - readonly fmtId?: `FMTID:${string}`; - readonly defaultSelect?: DefSelect; -}): TableOccurrence { - return new TableOccurrence(config); -} diff --git a/packages/fmodata/src/client/update-builder.ts b/packages/fmodata/src/client/update-builder.ts index 1ae95a0b..a2b2292b 100644 --- a/packages/fmodata/src/client/update-builder.ts +++ b/packages/fmodata/src/client/update-builder.ts @@ -4,48 +4,47 @@ import type { Result, WithSystemFields, ExecuteOptions, + ExecuteMethodOptions, } from "../types"; import { getAcceptHeader } from "../types"; -import type { TableOccurrence } from "./table-occurrence"; -import type { BaseTable } from "./base-table"; +import type { FMTable, InferSchemaOutputFromFMTable } from "../orm/table"; +import { + getTableName, + getTableId as getTableIdHelper, + getBaseTableConfig, + isUsingEntityIds, +} from "../orm/table"; import { QueryBuilder } from "./query-builder"; import { type FFetchOptions } from "@fetchkit/ffetch"; -import { - transformFieldNamesToIds, - 
transformTableName, - getTableIdentifiers, -} from "../transform"; +import { transformFieldNamesToIds } from "../transform"; import { parseErrorResponse } from "./error-parser"; +import { validateAndTransformInput } from "../validation"; /** * Initial update builder returned from EntitySet.update(data) * Requires calling .byId() or .where() before .execute() is available */ export class UpdateBuilder< - T extends Record, - BT extends BaseTable, + Occ extends FMTable, ReturnPreference extends "minimal" | "representation" = "minimal", > { - private tableName: string; private databaseName: string; private context: ExecutionContext; - private occurrence?: TableOccurrence; - private data: Partial; + private table: Occ; + private data: Partial>; private returnPreference: ReturnPreference; private databaseUseEntityIds: boolean; constructor(config: { - occurrence?: TableOccurrence; - tableName: string; + occurrence: Occ; databaseName: string; context: ExecutionContext; - data: Partial; + data: Partial>; returnPreference: ReturnPreference; databaseUseEntityIds?: boolean; }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; + this.table = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.data = config.data; @@ -59,10 +58,9 @@ export class UpdateBuilder< */ byId( id: string | number, - ): ExecutableUpdateBuilder { - return new ExecutableUpdateBuilder({ - occurrence: this.occurrence, - tableName: this.tableName, + ): ExecutableUpdateBuilder { + return new ExecutableUpdateBuilder({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, data: this.data, @@ -79,20 +77,11 @@ export class UpdateBuilder< * @param fn Callback that receives a QueryBuilder for building the filter */ where( - fn: ( - q: QueryBuilder>, - ) => QueryBuilder>, - ): ExecutableUpdateBuilder { + fn: (q: QueryBuilder) => QueryBuilder, + ): ExecutableUpdateBuilder { // Create a QueryBuilder for the user to 
configure - const queryBuilder = new QueryBuilder< - WithSystemFields, - keyof WithSystemFields, - false, - false, - undefined - >({ - occurrence: undefined, - tableName: this.tableName, + const queryBuilder = new QueryBuilder({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, }); @@ -100,9 +89,8 @@ export class UpdateBuilder< // Let the user configure it const configuredBuilder = fn(queryBuilder); - return new ExecutableUpdateBuilder({ - occurrence: this.occurrence, - tableName: this.tableName, + return new ExecutableUpdateBuilder({ + occurrence: this.table, databaseName: this.databaseName, context: this.context, data: this.data, @@ -120,39 +108,38 @@ export class UpdateBuilder< * Can return either updated count or full record based on returnFullRecord option */ export class ExecutableUpdateBuilder< - T extends Record, + Occ extends FMTable, IsByFilter extends boolean, ReturnPreference extends "minimal" | "representation" = "minimal", > implements ExecutableBuilder< - ReturnPreference extends "minimal" ? { updatedCount: number } : T + ReturnPreference extends "minimal" + ? 
{ updatedCount: number } + : InferSchemaOutputFromFMTable > { - private tableName: string; private databaseName: string; private context: ExecutionContext; - private occurrence?: TableOccurrence; - private data: Partial; + private table: Occ; + private data: Partial>; private mode: "byId" | "byFilter"; private recordId?: string | number; - private queryBuilder?: QueryBuilder; + private queryBuilder?: QueryBuilder; private returnPreference: ReturnPreference; private databaseUseEntityIds: boolean; constructor(config: { - occurrence?: TableOccurrence; - tableName: string; + occurrence: Occ; databaseName: string; context: ExecutionContext; - data: Partial; + data: Partial>; mode: "byId" | "byFilter"; recordId?: string | number; - queryBuilder?: QueryBuilder; + queryBuilder?: QueryBuilder; returnPreference: ReturnPreference; databaseUseEntityIds?: boolean; }) { - this.occurrence = config.occurrence; - this.tableName = config.tableName; + this.table = config.occurrence; this.databaseName = config.databaseName; this.context = config.context; this.data = config.data; @@ -181,30 +168,29 @@ export class ExecutableUpdateBuilder< * @param useEntityIds - Optional override for entity ID usage */ private getTableId(useEntityIds?: boolean): string { - if (!this.occurrence) { - return this.tableName; - } - const contextDefault = this.context._getUseEntityIds?.() ?? false; const shouldUseIds = useEntityIds ?? 
contextDefault; if (shouldUseIds) { - const identifiers = getTableIdentifiers(this.occurrence); - if (!identifiers.id) { + if (!isUsingEntityIds(this.table)) { throw new Error( - `useEntityIds is true but TableOccurrence "${identifiers.name}" does not have an fmtId defined`, + `useEntityIds is true but table "${getTableName(this.table)}" does not have entity IDs configured`, ); } - return identifiers.id; + return getTableIdHelper(this.table); } - return this.occurrence.getTableName(); + return getTableName(this.table); } async execute( - options?: RequestInit & FFetchOptions & { useEntityIds?: boolean }, + options?: ExecuteMethodOptions, ): Promise< - Result + Result< + ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable + > > { // Merge database-level useEntityIds with per-request options const mergedOptions = this.mergeExecuteOptions(options); @@ -212,14 +198,31 @@ export class ExecutableUpdateBuilder< // Get table identifier with override support const tableId = this.getTableId(mergedOptions.useEntityIds); + // Validate and transform input data using input validators (writeValidators) + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + // If validation fails, return error immediately + return { + data: undefined, + error: error instanceof Error ? error : new Error(String(error)), + } as any; + } + } + // Transform field names to FMFIDs if using entity IDs // Only transform if useEntityIds resolves to true (respects per-request override) const shouldUseIds = mergedOptions.useEntityIds ?? false; const transformedData = - this.occurrence?.baseTable && shouldUseIds - ? transformFieldNamesToIds(this.data, this.occurrence.baseTable) - : this.data; + this.table && shouldUseIds + ? 
transformFieldNamesToIds(validatedData, this.table) + : validatedData; let url: string; @@ -236,10 +239,11 @@ export class ExecutableUpdateBuilder< const queryString = this.queryBuilder.getQueryString(); // The query string will have the tableId already transformed by QueryBuilder // Remove the leading "/" and table name from the query string as we'll build our own URL + const tableName = getTableName(this.table); const queryParams = queryString.startsWith(`/${tableId}`) ? queryString.slice(`/${tableId}`.length) - : queryString.startsWith(`/${this.tableName}`) - ? queryString.slice(`/${this.tableName}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) : queryString; url = `/${this.databaseName}/${tableId}${queryParams}`; @@ -274,7 +278,7 @@ export class ExecutableUpdateBuilder< return { data: response as ReturnPreference extends "minimal" ? { updatedCount: number } - : T, + : InferSchemaOutputFromFMTable, error: undefined, }; } else { @@ -291,7 +295,7 @@ export class ExecutableUpdateBuilder< return { data: { updatedCount } as ReturnPreference extends "minimal" ? { updatedCount: number } - : T, + : InferSchemaOutputFromFMTable, error: undefined, }; } @@ -299,12 +303,13 @@ export class ExecutableUpdateBuilder< getRequestConfig(): { method: string; url: string; body?: any } { // For batch operations, use database-level setting (no per-request override available here) + // Note: Input validation happens in execute() and processResponse() for batch operations const tableId = this.getTableId(this.databaseUseEntityIds); // Transform field names to FMFIDs if using entity IDs const transformedData = - this.occurrence?.baseTable && this.databaseUseEntityIds - ? transformFieldNamesToIds(this.data, this.occurrence.baseTable) + this.table && this.databaseUseEntityIds + ? 
transformFieldNamesToIds(this.data, this.table) : this.data; let url: string; @@ -317,10 +322,11 @@ export class ExecutableUpdateBuilder< } const queryString = this.queryBuilder.getQueryString(); + const tableName = getTableName(this.table); const queryParams = queryString.startsWith(`/${tableId}`) ? queryString.slice(`/${tableId}`.length) - : queryString.startsWith(`/${this.tableName}`) - ? queryString.slice(`/${this.tableName}`.length) + : queryString.startsWith(`/${tableName}`) + ? queryString.slice(`/${tableName}`.length) : queryString; url = `/${this.databaseName}/${tableId}${queryParams}`; @@ -351,13 +357,18 @@ export class ExecutableUpdateBuilder< response: Response, options?: ExecuteOptions, ): Promise< - Result + Result< + ReturnPreference extends "minimal" + ? { updatedCount: number } + : InferSchemaOutputFromFMTable + > > { // Check for error responses (important for batch operations) if (!response.ok) { + const tableName = getTableName(this.table); const error = await parseErrorResponse( response, - response.url || `/${this.databaseName}/${this.tableName}`, + response.url || `/${this.databaseName}/${tableName}`, ); return { data: undefined, error }; } @@ -371,20 +382,37 @@ export class ExecutableUpdateBuilder< return { data: { updatedCount } as ReturnPreference extends "minimal" ? { updatedCount: number } - : T, + : InferSchemaOutputFromFMTable, error: undefined, }; } const rawResponse = JSON.parse(text); + // Validate and transform input data using input validators (writeValidators) + // This is needed for processResponse because it's called from batch operations + // where the data hasn't been validated yet + let validatedData = this.data; + if (this.table) { + const baseTableConfig = getBaseTableConfig(this.table); + const inputSchema = baseTableConfig.inputSchema; + try { + validatedData = await validateAndTransformInput(this.data, inputSchema); + } catch (error) { + return { + data: undefined, + error: error instanceof Error ? 
error : new Error(String(error)), + } as any; + } + } + // Handle based on return preference if (this.returnPreference === "representation") { // Return the full updated record return { data: rawResponse as ReturnPreference extends "minimal" ? { updatedCount: number } - : T, + : InferSchemaOutputFromFMTable, error: undefined, }; } else { @@ -401,7 +429,7 @@ export class ExecutableUpdateBuilder< return { data: { updatedCount } as ReturnPreference extends "minimal" ? { updatedCount: number } - : T, + : InferSchemaOutputFromFMTable, error: undefined, }; } diff --git a/packages/fmodata/src/filter-types.ts b/packages/fmodata/src/filter-types.ts deleted file mode 100644 index 294cb807..00000000 --- a/packages/fmodata/src/filter-types.ts +++ /dev/null @@ -1,97 +0,0 @@ -import type { StandardSchemaV1 } from "@standard-schema/spec"; - -// Operator types for each value type -export type StringOperators = - | { eq: string | null } - | { ne: string | null } - | { gt: string } - | { ge: string } - | { lt: string } - | { le: string } - | { contains: string } - | { startswith: string } - | { endswith: string } - | { in: string[] }; - -export type NumberOperators = - | { eq: number | null } - | { ne: number | null } - | { gt: number } - | { ge: number } - | { lt: number } - | { le: number } - | { in: number[] }; - -export type BooleanOperators = - | { eq: boolean | null } - | { ne: boolean | null }; - -export type DateOperators = - | { eq: Date | null } - | { ne: Date | null } - | { gt: Date } - | { ge: Date } - | { lt: Date } - | { le: Date } - | { in: Date[] }; - -// Infer output type from StandardSchemaV1 -export type InferOutput = S extends StandardSchemaV1 - ? Output - : never; - -// Map inferred types to their operators -export type OperatorsForType = - T extends string | null | undefined ? StringOperators : - T extends number | null | undefined ? NumberOperators : - T extends boolean | null | undefined ? BooleanOperators : - T extends Date | null | undefined ? 
DateOperators : - never; - -// Get operators for a schema field -export type OperatorsForSchemaField = - OperatorsForType>; - -// Field filter: shorthand, single operator, or operator array -export type FieldFilter = - | InferOutput // Shorthand: { name: "John" } - | OperatorsForSchemaField // Single operator: { age: { gt: 18 } } - | Array>; // Multiple operators: { age: [{ gt: 18 }, { lt: 65 }] } - -// Logical operators (recursive) -export type LogicalFilter> = { - and?: Array>; - or?: Array>; - not?: TypedFilter; -}; - -// Helper to check if Schema is exactly Record (untyped) -// Uses double extends check to ensure Schema is exactly the generic type, not a more specific type -type IsUntypedSchema = - [Record] extends [Schema] - ? [Schema] extends [Record] - ? true - : false - : false; - -// Main filter type -export type TypedFilter> = - | LogicalFilter - | ( - IsUntypedSchema extends true - ? { - // For untyped schemas, allow arbitrary string keys with empty object intersection (preserves autocomplete) - [key: string]: FieldFilter | any; - } & {} - : { - // For typed schemas, use specific keys (preserves autocomplete) - [K in keyof Schema]?: FieldFilter; - } - ); - -// Top-level filter (can be array for implicit AND) -export type Filter> = - | TypedFilter - | Array> - | string; // Escape hatch for raw OData expressions - diff --git a/packages/fmodata/src/index.ts b/packages/fmodata/src/index.ts index 50502300..745d4f99 100644 --- a/packages/fmodata/src/index.ts +++ b/packages/fmodata/src/index.ts @@ -1,14 +1,60 @@ // Barrel file - exports all public API from the client folder // Main API - use these functions to create tables and occurrences -export { defineBaseTable } from "./client/base-table"; -export { defineTableOccurrence } from "./client/table-occurrence"; -export { buildOccurrences } from "./client/build-occurrences"; export { FMServerConnection } from "./client/filemaker-odata"; +// NEW ORM API - Drizzle-inspired field builders and operators +export { + 
// Field builders + textField, + numberField, + dateField, + timeField, + timestampField, + containerField, + calcField, + type FieldBuilder, + // Table definition + fmTableOccurrence, + FMTable, + type FMTableWithColumns as TableOccurrenceResult, + type InferTableSchema, + // Table helper functions + // getTableFields, + // getDefaultSelect, + // getBaseTableConfig, + // getFieldId, + // getFieldName, + // getTableId, + getTableColumns, + // Column references + type Column, + isColumn, + // Filter operators + type FilterExpression, + eq, + ne, + gt, + gte, + lt, + lte, + contains, + startsWith, + endsWith, + inArray, + notInArray, + isNull, + isNotNull, + and, + or, + not, + // OrderBy operators + type OrderByExpression, + asc, + desc, +} from "./orm/index"; + // Type-only exports - for type annotations only, not direct instantiation -export type { BaseTable } from "./client/base-table"; -export type { TableOccurrence } from "./client/table-occurrence"; export type { Database } from "./client/database"; export type { EntitySet } from "./client/entity-set"; export type { @@ -28,25 +74,14 @@ export type { BatchResult, BatchItemResult, InferSchemaType, - InsertData, - UpdateData, ODataRecordMetadata, Metadata, + FetchHandler, + ExecuteMethodOptions, + ExecuteOptions, } from "./types"; -// Filter types -export type { - Filter, - TypedFilter, - FieldFilter, - StringOperators, - NumberOperators, - BooleanOperators, - DateOperators, - LogicalFilter, -} from "./filter-types"; - -// Re-export ffetch errors +// Re-export ffetch errors and types export { TimeoutError, AbortError, @@ -55,6 +90,8 @@ export { CircuitOpenError, } from "@fetchkit/ffetch"; +export type { FFetchOptions } from "@fetchkit/ffetch"; + // Export our errors export { FMODataError, @@ -78,3 +115,5 @@ export { } from "./errors"; export type { FMODataErrorType } from "./errors"; + +export type { Logger } from "./logger"; diff --git a/packages/fmodata/src/logger.test.ts b/packages/fmodata/src/logger.test.ts 
new file mode 100644 index 00000000..05c89be4 --- /dev/null +++ b/packages/fmodata/src/logger.test.ts @@ -0,0 +1,34 @@ +import { describe, expect, it } from "vitest"; +import type { LogLevel } from "./logger"; +import { shouldPublishLog } from "./logger"; + +describe("shouldPublishLog", () => { + const testCases: { + currentLogLevel: LogLevel; + logLevel: LogLevel; + expected: boolean; + }[] = [ + { currentLogLevel: "debug", logLevel: "debug", expected: true }, + { currentLogLevel: "debug", logLevel: "info", expected: true }, + { currentLogLevel: "debug", logLevel: "warn", expected: true }, + { currentLogLevel: "debug", logLevel: "error", expected: true }, + { currentLogLevel: "info", logLevel: "debug", expected: false }, + { currentLogLevel: "info", logLevel: "info", expected: true }, + { currentLogLevel: "info", logLevel: "warn", expected: true }, + { currentLogLevel: "info", logLevel: "error", expected: true }, + { currentLogLevel: "warn", logLevel: "debug", expected: false }, + { currentLogLevel: "warn", logLevel: "info", expected: false }, + { currentLogLevel: "warn", logLevel: "warn", expected: true }, + { currentLogLevel: "warn", logLevel: "error", expected: true }, + { currentLogLevel: "error", logLevel: "debug", expected: false }, + { currentLogLevel: "error", logLevel: "info", expected: false }, + { currentLogLevel: "error", logLevel: "warn", expected: false }, + { currentLogLevel: "error", logLevel: "error", expected: true }, + ]; + + testCases.forEach(({ currentLogLevel, logLevel, expected }) => { + it(`should return "${expected}" when currentLogLevel is "${currentLogLevel}" and logLevel is "${logLevel}"`, () => { + expect(shouldPublishLog(currentLogLevel, logLevel)).toBe(expected); + }); + }); +}); diff --git a/packages/fmodata/src/logger.ts b/packages/fmodata/src/logger.ts new file mode 100644 index 00000000..41841920 --- /dev/null +++ b/packages/fmodata/src/logger.ts @@ -0,0 +1,140 @@ +export const TTY_COLORS = { + reset: "\x1b[0m", + bright: 
"\x1b[1m", + dim: "\x1b[2m", + undim: "\x1b[22m", + underscore: "\x1b[4m", + blink: "\x1b[5m", + reverse: "\x1b[7m", + hidden: "\x1b[8m", + fg: { + black: "\x1b[30m", + red: "\x1b[31m", + green: "\x1b[32m", + yellow: "\x1b[33m", + blue: "\x1b[34m", + magenta: "\x1b[35m", + cyan: "\x1b[36m", + white: "\x1b[37m", + }, + bg: { + black: "\x1b[40m", + red: "\x1b[41m", + green: "\x1b[42m", + yellow: "\x1b[43m", + blue: "\x1b[44m", + magenta: "\x1b[45m", + cyan: "\x1b[46m", + white: "\x1b[47m", + }, +} as const; + +export type LogLevel = "debug" | "info" | "success" | "warn" | "error"; + +export const levels = ["debug", "info", "success", "warn", "error"] as const; + +export function shouldPublishLog( + currentLogLevel: LogLevel, + logLevel: LogLevel, +): boolean { + return levels.indexOf(logLevel) >= levels.indexOf(currentLogLevel); +} + +export interface Logger { + disabled?: boolean | undefined; + disableColors?: boolean | undefined; + level?: Exclude | undefined; + log?: + | (( + level: Exclude, + message: string, + ...args: any[] + ) => void) + | undefined; +} + +export type LogHandlerParams = + Parameters> extends [LogLevel, ...infer Rest] + ? 
Rest + : never; + +const levelColors: Record = { + info: TTY_COLORS.fg.blue, + success: TTY_COLORS.fg.green, + warn: TTY_COLORS.fg.yellow, + error: TTY_COLORS.fg.red, + debug: TTY_COLORS.fg.magenta, +}; + +const formatMessage = ( + level: LogLevel, + message: string, + colorsEnabled: boolean, +): string => { + const timestamp = new Date().toISOString(); + + if (colorsEnabled) { + return `${TTY_COLORS.dim}${timestamp}${TTY_COLORS.reset} ${ + levelColors[level] + }${level.toUpperCase()}${TTY_COLORS.reset} ${TTY_COLORS.bright}[FMODATA]:${ + TTY_COLORS.reset + } ${message}`; + } + + return `${timestamp} ${level.toUpperCase()} [FMODATA]: ${message}`; +}; + +export type InternalLogger = { + [K in LogLevel]: (...params: LogHandlerParams) => void; +} & { + get level(): LogLevel; +}; + +export const createLogger = (options?: Logger | undefined): InternalLogger => { + const enabled = options?.disabled !== true; + const logLevel = options?.level ?? "error"; + + const colorsEnabled = options?.disableColors !== true; + + const LogFunc = ( + level: LogLevel, + message: string, + args: any[] = [], + ): void => { + if (!enabled || !shouldPublishLog(logLevel, level)) { + return; + } + + const formattedMessage = formatMessage(level, message, colorsEnabled); + + if (!options || typeof options.log !== "function") { + if (level === "error") { + console.error(formattedMessage, ...args); + } else if (level === "warn") { + console.warn(formattedMessage, ...args); + } else { + console.log(formattedMessage, ...args); + } + return; + } + + options.log(level === "success" ? 
"info" : level, message, ...args); + }; + + const logger = Object.fromEntries( + levels.map((level) => [ + level, + (...[message, ...args]: LogHandlerParams) => + LogFunc(level, message, args), + ]), + ) as Record void>; + + return { + ...logger, + get level() { + return logLevel; + }, + }; +}; + +export const logger = createLogger(); diff --git a/packages/fmodata/src/orm/column.ts b/packages/fmodata/src/orm/column.ts new file mode 100644 index 00000000..3baf7b05 --- /dev/null +++ b/packages/fmodata/src/orm/column.ts @@ -0,0 +1,106 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; + +/** + * Column represents a type-safe reference to a table field. + * Used in queries, filters, and operators to provide autocomplete and type checking. + * + * @template TOutput - The TypeScript type when reading from the database (output type) + * @template TInput - The TypeScript type when writing to the database (input type, for filters) + * @template TableName - The table name as a string literal type (for validation) + * @template IsContainer - Whether this column represents a container field (cannot be selected) + */ +export class Column< + TOutput = any, + TInput = TOutput, + TableName extends string = string, + IsContainer extends boolean = false, +> { + readonly fieldName: string; + readonly entityId?: `FMFID:${string}`; + readonly tableName: TableName; + readonly tableEntityId?: `FMTID:${string}`; + readonly inputValidator?: StandardSchemaV1; + + // Phantom types for TypeScript inference - never actually hold values + readonly _phantomOutput!: TOutput; + readonly _phantomInput!: TInput; + readonly _isContainer!: IsContainer; + + constructor(config: { + fieldName: string; + entityId?: `FMFID:${string}`; + tableName: TableName; + tableEntityId?: `FMTID:${string}`; + inputValidator?: StandardSchemaV1; + }) { + this.fieldName = config.fieldName; + this.entityId = config.entityId; + this.tableName = config.tableName; + this.tableEntityId = config.tableEntityId; + 
this.inputValidator = config.inputValidator; + } + + /** + * Get the field identifier (entity ID if available, otherwise field name). + * Used when building OData queries. + */ + getFieldIdentifier(useEntityIds?: boolean): string { + if (useEntityIds && this.entityId) { + return this.entityId; + } + return this.fieldName; + } + + /** + * Get the table identifier (entity ID if available, otherwise table name). + * Used when building OData queries. + */ + getTableIdentifier(useEntityIds?: boolean): string { + if (useEntityIds && this.tableEntityId) { + return this.tableEntityId; + } + return this.tableName; + } + + /** + * Check if this column is from a specific table. + * Useful for validation in cross-table operations. + */ + isFromTable(tableName: string): boolean { + return this.tableName === tableName; + } + + /** + * Create a string representation for debugging. + */ + toString(): string { + return `${this.tableName}.${this.fieldName}`; + } +} + +/** + * Type guard to check if a value is a Column instance. + */ +export function isColumn(value: any): value is Column { + return value instanceof Column; +} + +/** + * Create a Column with proper type inference from the inputValidator. + * This helper ensures TypeScript can infer TInput from the validator's input type. + * @internal + */ +export function createColumn< + TOutput, + TInput, + TName extends string, + IsContainer extends boolean = false, +>(config: { + fieldName: string; + entityId?: `FMFID:${string}`; + tableName: TName; + tableEntityId?: `FMTID:${string}`; + inputValidator?: StandardSchemaV1; +}): Column { + return new Column(config) as Column; +} diff --git a/packages/fmodata/src/orm/field-builders.ts b/packages/fmodata/src/orm/field-builders.ts new file mode 100644 index 00000000..d7acbaa7 --- /dev/null +++ b/packages/fmodata/src/orm/field-builders.ts @@ -0,0 +1,296 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; + +/** + * Branded type for container field's database type. 
+ * This allows TypeScript to distinguish container fields from regular string fields + * at the type level, enabling compile-time exclusion from select operations. + */ +export type ContainerDbType = string & { readonly __container: true }; + +/** + * FieldBuilder provides a fluent API for defining table fields with type-safe metadata. + * Supports chaining methods to configure primary keys, nullability, read-only status, entity IDs, and validators. + * + * @template TOutput - The output type after applying outputValidator (what you get when reading) + * @template TInput - The input type after applying inputValidator (what you pass when writing) + * @template TDbType - The database type (what FileMaker stores/expects) + * @template TReadOnly - Whether this field is read-only (for type-level exclusion from insert/update) + */ +export class FieldBuilder< + TOutput = any, + TInput = TOutput, + TDbType = TOutput, + TReadOnly extends boolean = false, +> { + private _primaryKey = false; + private _notNull = false; + private _readOnly = false; + private _entityId?: `FMFID:${string}`; + private _outputValidator?: StandardSchemaV1; + private _inputValidator?: StandardSchemaV1; + private _fieldType: string; + + constructor(fieldType: string) { + this._fieldType = fieldType; + } + + /** + * Mark this field as the primary key for the table. + * Primary keys are automatically read-only. + */ + primaryKey(): FieldBuilder { + const builder = this._clone() as any; + builder._primaryKey = true; + builder._readOnly = true; // Primary keys are automatically read-only + return builder; + } + + /** + * Mark this field as non-nullable. + * Updates the type to exclude null/undefined. + */ + notNull(): FieldBuilder< + NonNullable, + NonNullable, + NonNullable, + TReadOnly + > { + const builder = this._clone() as any; + builder._notNull = true; + return builder; + } + + /** + * Mark this field as read-only. + * Read-only fields are excluded from insert and update operations. 
+ */ + readOnly(): FieldBuilder { + const builder = this._clone() as any; + builder._readOnly = true; + return builder; + } + + /** + * Assign a FileMaker field ID (FMFID) to this field. + * When useEntityIds is enabled, this ID will be used in API requests instead of the field name. + */ + entityId( + id: `FMFID:${string}`, + ): FieldBuilder { + const builder = this._clone(); + builder._entityId = id; + return builder; + } + + /** + * Set a validator for the output (reading from database). + * The output validator transforms/validates data coming FROM the database in list or get operations. + * + * @example + * numberField().readValidator(z.coerce.boolean()) + * // FileMaker returns 0/1, you get true/false + */ + readValidator( + validator: StandardSchemaV1, + ): FieldBuilder { + const builder = this._clone() as any; + builder._outputValidator = validator; + return builder; + } + + /** + * Set a validator for the input (writing to database). + * The input validator transforms/validates data going TO the database in insert, update, and filter operations. + * + * @example + * numberField().writeValidator(z.boolean().transform(v => v ? 1 : 0)) + * // You pass true/false, FileMaker gets 1/0 + */ + writeValidator( + validator: StandardSchemaV1, + ): FieldBuilder { + const builder = this._clone() as any; + builder._inputValidator = validator; + return builder; + } + + /** + * Get the metadata configuration for this field. + * @internal Used by fmTableOccurrence to extract field configuration + */ + _getConfig() { + return { + fieldType: this._fieldType, + primaryKey: this._primaryKey, + notNull: this._notNull, + readOnly: this._readOnly, + entityId: this._entityId, + outputValidator: this._outputValidator, + inputValidator: this._inputValidator, + }; + } + + /** + * Clone this builder to allow immutable chaining. 
+ * @private + */ + private _clone(): FieldBuilder { + const builder = new FieldBuilder( + this._fieldType, + ); + builder._primaryKey = this._primaryKey; + builder._notNull = this._notNull; + builder._readOnly = this._readOnly; + builder._entityId = this._entityId; + builder._outputValidator = this._outputValidator; + builder._inputValidator = this._inputValidator; + return builder; + } +} + +/** + * Create a text field (Edm.String in FileMaker OData). + * By default, text fields are nullable. + * + * @example + * textField() // string | null + * textField().notNull() // string + * textField().entityId("FMFID:1") // with entity ID + */ +export function textField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "text", + ); +} + +/** + * Create a number field (Edm.Decimal in FileMaker OData). + * By default, number fields are nullable. + * + * @example + * numberField() // number | null + * numberField().notNull() // number + * numberField().outputValidator(z.coerce.boolean()) // transform to boolean on read + */ +export function numberField(): FieldBuilder< + number | null, + number | null, + number | null, + false +> { + return new FieldBuilder( + "number", + ); +} + +/** + * Create a date field (Edm.Date in FileMaker OData). + * By default, date fields are nullable and represented as ISO date strings (YYYY-MM-DD). + * + * @example + * dateField() // string | null (ISO date format) + * dateField().notNull() // string + */ +export function dateField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "date", + ); +} + +/** + * Create a time field (Edm.TimeOfDay in FileMaker OData). + * By default, time fields are nullable and represented as ISO time strings (HH:mm:ss). 
+ * + * @example + * timeField() // string | null (ISO time format) + * timeField().notNull() // string + */ +export function timeField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "time", + ); +} + +/** + * Create a timestamp field (Edm.DateTimeOffset in FileMaker OData). + * By default, timestamp fields are nullable and represented as ISO 8601 strings. + * + * @example + * timestampField() // string | null (ISO 8601 format) + * timestampField().notNull() // string + * timestampField().readOnly() // typical for CreationTimestamp + */ +export function timestampField(): FieldBuilder< + string | null, + string | null, + string | null, + false +> { + return new FieldBuilder( + "timestamp", + ); +} + +/** + * Create a container field (Edm.Stream in FileMaker OData). + * Container fields store binary data and are represented as base64 strings in the API. + * By default, container fields are nullable. + * + * Note: Container fields cannot be selected via .select() - they can only be accessed + * via .getSingleField() due to FileMaker OData API limitations. + * + * @example + * containerField() // string | null (base64 encoded) + * containerField().notNull() // string + */ +export function containerField(): FieldBuilder< + string | null, + string | null, + ContainerDbType | null, + false +> { + return new FieldBuilder< + string | null, + string | null, + ContainerDbType | null, + false + >("container"); +} + +/** + * Create a calculated field (read-only field computed by FileMaker). + * Calculated fields are automatically marked as read-only. 
+ * + * @example + * calcField() // string | null + * calcField().notNull() // string + */ +export function calcField(): FieldBuilder< + string | null, + string | null, + string | null, + true +> { + const builder = new FieldBuilder< + string | null, + string | null, + string | null, + false + >("calculated"); + return builder.readOnly(); +} diff --git a/packages/fmodata/src/orm/index.ts b/packages/fmodata/src/orm/index.ts new file mode 100644 index 00000000..9138b31a --- /dev/null +++ b/packages/fmodata/src/orm/index.ts @@ -0,0 +1,60 @@ +// Field builders - main API for defining table schemas +export { + FieldBuilder, + textField, + numberField, + dateField, + timeField, + timestampField, + containerField, + calcField, + type ContainerDbType, +} from "./field-builders"; + +// Column references - used in queries and filters +export { Column, isColumn } from "./column"; + +// Filter operators - eq, gt, lt, and, or, etc. +export { + FilterExpression, + eq, + ne, + gt, + gte, + lt, + lte, + contains, + startsWith, + endsWith, + inArray, + notInArray, + isNull, + isNotNull, + and, + or, + not, + // OrderBy operators + OrderByExpression, + isOrderByExpression, + asc, + desc, +} from "./operators"; + +// Table definition - fmTableOccurrence function +export { + fmTableOccurrence, + FMTable, + type FMTableWithColumns, + type InferTableSchema, + // Helper functions for accessing FMTable internals + getTableName, + getTableEntityId, + // getTableFields, + getDefaultSelect, + getBaseTableConfig, + isUsingEntityIds, + getFieldId, + getFieldName, + getTableId, + getTableColumns, +} from "./table"; diff --git a/packages/fmodata/src/orm/operators.ts b/packages/fmodata/src/orm/operators.ts new file mode 100644 index 00000000..6cf2c7ac --- /dev/null +++ b/packages/fmodata/src/orm/operators.ts @@ -0,0 +1,487 @@ +import type { Column } from "./column"; +import { isColumn } from "./column"; +import { needsFieldQuoting } from "../client/builders/select-utils"; + +/** + * 
FilterExpression represents a filter condition that can be used in where() clauses. + * Internal representation of operator expressions that get converted to OData filter syntax. + */ +export class FilterExpression { + constructor( + public readonly operator: string, + public readonly operands: (Column | any | FilterExpression)[], + ) {} + + /** + * Convert this expression to OData filter syntax. + * @internal Used by QueryBuilder + */ + toODataFilter(useEntityIds?: boolean): string { + switch (this.operator) { + // Comparison operators + case "eq": + return this._binaryOp("eq", useEntityIds); + case "ne": + return this._binaryOp("ne", useEntityIds); + case "gt": + return this._binaryOp("gt", useEntityIds); + case "gte": + return this._binaryOp("ge", useEntityIds); + case "lt": + return this._binaryOp("lt", useEntityIds); + case "lte": + return this._binaryOp("le", useEntityIds); + case "in": + return this._inOp(useEntityIds); + case "notIn": + return this._notInOp(useEntityIds); + + // String operators + case "contains": + return this._functionOp("contains", useEntityIds); + case "startsWith": + return this._functionOp("startswith", useEntityIds); + case "endsWith": + return this._functionOp("endswith", useEntityIds); + + // Null checks + case "isNull": + return this._isNullOp(useEntityIds); + case "isNotNull": + return this._isNotNullOp(useEntityIds); + + // Logical operators + case "and": + return this._logicalOp("and", useEntityIds); + case "or": + return this._logicalOp("or", useEntityIds); + case "not": + return this._notOp(useEntityIds); + + default: + throw new Error(`Unknown operator: ${this.operator}`); + } + } + + private _binaryOp(op: string, useEntityIds?: boolean): string { + const [left, right] = this.operands; + // For binary ops, the column is typically the first operand and value is the second + // But we also support column-to-column comparisons, so check both + const columnForValue = + isColumn(left) && !isColumn(right) + ? 
left + : isColumn(right) && !isColumn(left) + ? right + : undefined; + const leftStr = this._operandToString(left, useEntityIds, columnForValue); + const rightStr = this._operandToString(right, useEntityIds, columnForValue); + return `${leftStr} ${op} ${rightStr}`; + } + + private _functionOp(fnName: string, useEntityIds?: boolean): string { + const [column, value] = this.operands; + const columnInstance = isColumn(column) ? column : undefined; + const columnStr = this._operandToString(column, useEntityIds); + const valueStr = this._operandToString(value, useEntityIds, columnInstance); + return `${fnName}(${columnStr}, ${valueStr})`; + } + + private _inOp(useEntityIds?: boolean): string { + const [column, values] = this.operands; + const columnInstance = isColumn(column) ? column : undefined; + const columnStr = this._operandToString(column, useEntityIds); + const valuesStr = (values as any[]) + .map((v) => this._operandToString(v, useEntityIds, columnInstance)) + .join(", "); + return `${columnStr} in (${valuesStr})`; + } + + private _notInOp(useEntityIds?: boolean): string { + const [column, values] = this.operands; + const columnInstance = isColumn(column) ? 
column : undefined; + const columnStr = this._operandToString(column, useEntityIds); + const valuesStr = (values as any[]) + .map((v) => this._operandToString(v, useEntityIds, columnInstance)) + .join(", "); + return `not (${columnStr} in (${valuesStr}))`; + } + + private _isNullOp(useEntityIds?: boolean): string { + const [column] = this.operands; + const columnStr = this._operandToString(column, useEntityIds); + return `${columnStr} eq null`; + } + + private _isNotNullOp(useEntityIds?: boolean): string { + const [column] = this.operands; + const columnStr = this._operandToString(column, useEntityIds); + return `${columnStr} ne null`; + } + + private _logicalOp(op: string, useEntityIds?: boolean): string { + const expressions = this.operands.map((expr) => { + if (expr instanceof FilterExpression) { + const innerExpr = expr.toODataFilter(useEntityIds); + // Wrap in parens if it's a logical expression to ensure precedence + if (expr.operator === "and" || expr.operator === "or") { + return `(${innerExpr})`; + } + return innerExpr; + } + throw new Error("Logical operators require FilterExpression operands"); + }); + return expressions.join(` ${op} `); + } + + private _notOp(useEntityIds?: boolean): string { + const [expr] = this.operands; + if (expr instanceof FilterExpression) { + return `not (${expr.toODataFilter(useEntityIds)})`; + } + throw new Error("NOT operator requires a FilterExpression operand"); + } + + private _operandToString( + operand: any, + useEntityIds?: boolean, + column?: Column, + ): string { + if (isColumn(operand)) { + const fieldIdentifier = operand.getFieldIdentifier(useEntityIds); + // Quote field names in OData filters per FileMaker OData API requirements + return needsFieldQuoting(fieldIdentifier) + ? 
`"${fieldIdentifier}"` + : fieldIdentifier; + } + + // If we have a column with an input validator, apply it to transform the value + let value = operand; + if (column?.inputValidator) { + try { + const result = column.inputValidator["~standard"].validate(value); + // Handle async validators (though they shouldn't be async for filters) + if (result instanceof Promise) { + // For filters, we can't use async validators, so skip transformation + // This is a limitation - async validators won't work in filters + value = operand; + } else if ("issues" in result && result.issues) { + // Validation failed, use original value + value = operand; + } else if ("value" in result) { + // Validation succeeded, use transformed value + value = result.value; + } + } catch (error) { + // If validation throws, use the original value (will likely cause a query error) + // This maintains backward compatibility and allows the server to handle validation + value = operand; + } + } + + if (typeof value === "string") { + return `'${value.replace(/'/g, "''")}'`; // Escape single quotes + } + if (value === null || value === undefined) { + return "null"; + } + if (value instanceof Date) { + return value.toISOString(); + } + return String(value); + } +} + +// ============================================================================ +// Comparison Operators +// ============================================================================ + +/** + * Equal operator - checks if column equals a value or another column. 
+ * + * @example + * eq(users.name, "John") // name equals "John" + * eq(users.id, contacts.id_user) // cross-table comparison + */ +export function eq( + column: Column, + value: NoInfer, +): FilterExpression; +export function eq( + column1: Column, + column2: Column, +): FilterExpression; +export function eq(column: Column, value: any): FilterExpression { + return new FilterExpression("eq", [column, value]); +} + +/** + * Not equal operator - checks if column does not equal a value or another column. + * + * @example + * ne(users.status, "inactive") // status not equal to "inactive" + * ne(users.id, contacts.id_user) // cross-table comparison + */ +export function ne( + column: Column, + value: NoInfer, +): FilterExpression; +export function ne( + column1: Column, + column2: Column, +): FilterExpression; +export function ne(column: Column, value: any): FilterExpression { + return new FilterExpression("ne", [column, value]); +} + +/** + * Greater than operator - checks if column is greater than a value. + * + * @example + * gt(users.age, 18) // age greater than 18 + */ +export function gt( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("gt", [column, value]); +} + +/** + * Greater than or equal operator - checks if column is >= a value. + * + * @example + * gte(users.age, 18) // age >= 18 + */ +export function gte( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("gte", [column, value]); +} + +/** + * Less than operator - checks if column is less than a value. + * + * @example + * lt(users.age, 65) // age less than 65 + */ +export function lt( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("lt", [column, value]); +} + +/** + * Less than or equal operator - checks if column is <= a value. 
+ * + * @example + * lte(users.age, 65) // age <= 65 + */ +export function lte( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("lte", [column, value]); +} + +// ============================================================================ +// String Operators +// ============================================================================ + +/** + * Contains operator - checks if a string column contains a substring. + * + * @example + * contains(users.name, "John") // name contains "John" + */ +export function contains( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("contains", [column, value]); +} + +/** + * Starts with operator - checks if a string column starts with a prefix. + * + * @example + * startsWith(users.email, "admin") // email starts with "admin" + */ +export function startsWith( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("startsWith", [column, value]); +} + +/** + * Ends with operator - checks if a string column ends with a suffix. + * + * @example + * endsWith(users.email, "@example.com") // email ends with "@example.com" + */ +export function endsWith( + column: Column, + value: NoInfer, +): FilterExpression { + return new FilterExpression("endsWith", [column, value]); +} + +// ============================================================================ +// Array Operators +// ============================================================================ + +/** + * In array operator - checks if column value is in an array of values. + * + * @example + * inArray(users.status, ["active", "pending"]) // status is "active" or "pending" + */ +export function inArray( + column: Column, + values: NoInfer[], +): FilterExpression { + return new FilterExpression("in", [column, values]); +} + +/** + * Not in array operator - checks if column value is not in an array of values. 
+ * + * @example + * notInArray(users.status, ["deleted", "banned"]) // status is neither "deleted" nor "banned" + */ +export function notInArray( + column: Column, + values: NoInfer[], +): FilterExpression { + return new FilterExpression("notIn", [column, values]); +} + +// ============================================================================ +// Null Check Operators +// ============================================================================ + +/** + * Is null operator - checks if column value is null. + * + * @example + * isNull(users.deletedAt) // deletedAt is null + */ +export function isNull( + column: Column, +): FilterExpression { + return new FilterExpression("isNull", [column]); +} + +/** + * Is not null operator - checks if column value is not null. + * + * @example + * isNotNull(users.email) // email is not null + */ +export function isNotNull( + column: Column, +): FilterExpression { + return new FilterExpression("isNotNull", [column]); +} + +// ============================================================================ +// Logical Operators +// ============================================================================ + +/** + * AND operator - combines multiple filter expressions with logical AND. + * All expressions must be true for the record to match. + * + * @example + * and( + * eq(users.active, true), + * gt(users.age, 18) + * ) // active is true AND age > 18 + */ +export function and(...expressions: FilterExpression[]): FilterExpression { + if (expressions.length === 0) { + throw new Error("AND operator requires at least one expression"); + } + if (expressions.length === 1 && expressions[0] !== undefined) { + return expressions[0]; + } + return new FilterExpression("and", expressions); +} + +/** + * OR operator - combines multiple filter expressions with logical OR. + * At least one expression must be true for the record to match. 
+ * + * @example + * or( + * eq(users.role, "admin"), + * eq(users.role, "moderator") + * ) // role is "admin" OR "moderator" + */ +export function or(...expressions: FilterExpression[]): FilterExpression { + if (expressions.length === 0) { + throw new Error("OR operator requires at least one expression"); + } + if (expressions.length === 1 && expressions[0] !== undefined) { + return expressions[0]; + } + return new FilterExpression("or", expressions); +} + +/** + * NOT operator - negates a filter expression. + * + * @example + * not(eq(users.status, "deleted")) // status is NOT "deleted" + */ +export function not(expression: FilterExpression): FilterExpression { + return new FilterExpression("not", [expression]); +} + +// ============================================================================ +// OrderBy Operators +// ============================================================================ + +/** + * OrderByExpression represents a sort order specification for a column. + * Used in orderBy() clauses to provide type-safe sorting with direction. + */ +export class OrderByExpression { + constructor( + public readonly column: Column, + public readonly direction: "asc" | "desc", + ) {} +} + +/** + * Type guard to check if a value is an OrderByExpression instance. + */ +export function isOrderByExpression(value: any): value is OrderByExpression { + return value instanceof OrderByExpression; +} + +/** + * Ascending order operator - sorts a column in ascending order. + * + * @example + * asc(users.name) // Sort by name ascending + */ +export function asc( + column: Column, +): OrderByExpression { + return new OrderByExpression(column, "asc"); +} + +/** + * Descending order operator - sorts a column in descending order. 
+ * + * @example + * desc(users.age) // Sort by age descending + */ +export function desc( + column: Column, +): OrderByExpression { + return new OrderByExpression(column, "desc"); +} diff --git a/packages/fmodata/src/orm/table.ts b/packages/fmodata/src/orm/table.ts new file mode 100644 index 00000000..b61d9d73 --- /dev/null +++ b/packages/fmodata/src/orm/table.ts @@ -0,0 +1,767 @@ +import type { StandardSchemaV1 } from "@standard-schema/spec"; +import { FieldBuilder, type ContainerDbType } from "./field-builders"; +import type { FieldBuilder as FieldBuilderType } from "./field-builders"; +import { Column, createColumn } from "./column"; +import { z } from "zod/v4"; + +/** + * Extract the output type from a FieldBuilder. + * This is what you get when reading from the database. + * + * This type extracts the TOutput type parameter, which is set by readValidator() + * and represents the transformed/validated output type. + */ +export type InferFieldOutput = + F extends FieldBuilder ? TOutput : never; + +/** + * Extract the input type from a FieldBuilder. + * This is what you pass when writing to the database. + * + * This type extracts the TInput type parameter, which is set by writeValidator() + * and represents the transformed/validated input type. + */ +type InferFieldInput = + F extends FieldBuilder ? TInput : never; + +/** + * Build a schema type from field builders (output/read types). + */ +type InferSchemaFromFields< + TFields extends Record>, +> = { + [K in keyof TFields]: InferFieldOutput; +}; + +/** + * Build an input schema type from field builders (input/write types). + * Used for insert and update operations. + */ +type InferInputSchemaFromFields< + TFields extends Record>, +> = { + [K in keyof TFields]: InferFieldInput; +}; + +/** + * Check if a field is a container field by inspecting its TDbType. + * Container fields have a branded TDbType that extends ContainerDbType. + */ +type IsContainerField = + F extends FieldBuilder + ? 
NonNullable extends ContainerDbType + ? true + : false + : false; + +/** + * Extract only selectable (non-container) field keys from a fields record. + * Container fields are excluded because they cannot be selected via $select in FileMaker OData. + */ +type SelectableFieldKeys< + TFields extends Record>, +> = { + [K in keyof TFields]: IsContainerField extends true ? never : K; +}[keyof TFields]; + +/** + * Build a schema type excluding container fields (for query return types). + * This is used to ensure container fields don't appear in the return type + * when using defaultSelect: "schema" or "all". + */ +type InferSelectableSchemaFromFields< + TFields extends Record>, +> = { + [K in SelectableFieldKeys]: InferFieldOutput; +}; + +/** + * Internal Symbols for table properties (hidden from IDE autocomplete). + * These are used to store internal configuration that shouldn't be visible + * when users access table columns. + * @internal - Not exported from public API, only accessible via FMTable.Symbol + */ +const FMTableName = Symbol.for("fmodata:FMTableName"); +const FMTableEntityId = Symbol.for("fmodata:FMTableEntityId"); +const FMTableSchema = Symbol.for("fmodata:FMTableSchema"); +const FMTableFields = Symbol.for("fmodata:FMTableFields"); +const FMTableNavigationPaths = Symbol.for("fmodata:FMTableNavigationPaths"); +const FMTableDefaultSelect = Symbol.for("fmodata:FMTableDefaultSelect"); +const FMTableBaseTableConfig = Symbol.for("fmodata:FMTableBaseTableConfig"); +const FMTableUseEntityIds = Symbol.for("fmodata:FMTableUseEntityIds"); + +/** + * Base table class with Symbol-based internal properties. + * This follows the Drizzle ORM pattern where internal configuration + * is stored via Symbols, keeping it hidden from IDE autocomplete. + */ +export class FMTable< + TFields extends Record> = any, + TName extends string = string, + TNavigationPaths extends readonly string[] = readonly string[], +> { + /** + * Internal Symbols for accessing table metadata. 
+ * @internal - Not intended for public use. Access table properties via columns instead. + */ + static readonly Symbol = { + Name: FMTableName, + EntityId: FMTableEntityId, + UseEntityIds: FMTableUseEntityIds, + Schema: FMTableSchema, + Fields: FMTableFields, + NavigationPaths: FMTableNavigationPaths, + DefaultSelect: FMTableDefaultSelect, + BaseTableConfig: FMTableBaseTableConfig, + }; + + /** @internal */ + [FMTableName]: TName; + + /** @internal */ + [FMTableEntityId]?: `FMTID:${string}`; + + /** @internal */ + [FMTableUseEntityIds]?: boolean; + + /** @internal */ + [FMTableSchema]: StandardSchemaV1>; + + /** @internal */ + [FMTableFields]: TFields; + + /** @internal */ + [FMTableNavigationPaths]: TNavigationPaths; + + /** @internal */ + [FMTableDefaultSelect]: + | "all" + | "schema" + | Record>; + + /** @internal */ + [FMTableBaseTableConfig]: { + schema: Record; + inputSchema?: Record; + idField?: keyof TFields; + required: readonly (keyof TFields)[]; + readOnly: readonly (keyof TFields)[]; + containerFields: readonly (keyof TFields)[]; + fmfIds?: Record; + }; + + constructor(config: { + name: TName; + entityId?: `FMTID:${string}`; + useEntityIds?: boolean; + schema: StandardSchemaV1>; + fields: TFields; + navigationPaths: TNavigationPaths; + defaultSelect: "all" | "schema" | Record>; + baseTableConfig: { + schema: Record; + inputSchema?: Record; + idField?: keyof TFields; + required: readonly (keyof TFields)[]; + readOnly: readonly (keyof TFields)[]; + containerFields: readonly (keyof TFields)[]; + fmfIds?: Record; + }; + }) { + this[FMTableName] = config.name; + this[FMTableEntityId] = config.entityId; + this[FMTableUseEntityIds] = config.useEntityIds; + this[FMTableSchema] = config.schema; + this[FMTableFields] = config.fields; + this[FMTableNavigationPaths] = config.navigationPaths; + this[FMTableDefaultSelect] = config.defaultSelect; + this[FMTableBaseTableConfig] = config.baseTableConfig; + } +} + +/** + * Type helper to extract the column map from 
fields. + * Table name is baked into each column type for validation. + * Container fields are marked with IsContainer=true. + * Columns include both output type (for reading) and input type (for writing/filtering). + */ +export type ColumnMap< + TFields extends Record>, + TName extends string, +> = { + [K in keyof TFields]: Column< + InferFieldOutput, + InferFieldInput, + TName, + IsContainerField + >; +}; + +/** + * Extract only selectable (non-container) columns from a table. + * This is used to prevent selecting container fields in queries. + */ +export type SelectableColumnMap< + TFields extends Record>, + TName extends string, +> = { + [K in SelectableFieldKeys]: Column< + InferFieldOutput, + InferFieldInput, + TName, + false + >; +}; + +/** + * Validates that a select object doesn't contain container field columns. + * Returns never if any container fields are found, otherwise returns the original type. + */ +export type ValidateNoContainerFields< + TSelect extends Record>, +> = { + [K in keyof TSelect]: TSelect[K] extends Column + ? never + : TSelect[K]; +} extends TSelect + ? TSelect + : { + [K in keyof TSelect]: TSelect[K] extends Column + ? "❌ Container fields cannot be selected. Use .getSingleField() instead." + : TSelect[K]; + }; + +/** + * Extract the keys from a defaultSelect function's return type. + * Used to infer which fields are selected by default for type narrowing. + */ +type ExtractDefaultSelectKeys< + TDefaultSelect, + TFields extends Record>, + TName extends string, +> = TDefaultSelect extends (columns: ColumnMap) => infer R + ? keyof R + : TDefaultSelect extends "schema" + ? keyof TFields + : keyof TFields; // "all" defaults to all keys + +/** + * Complete table type with both metadata (via Symbols) and column accessors. + * This is the return type of fmTableOccurrence - users see columns directly, + * but internal config is hidden via Symbols. 
+ */ +export type FMTableWithColumns< + TFields extends Record>, + TName extends string, + TNavigationPaths extends readonly string[] = readonly string[], +> = FMTable & ColumnMap; + +/** + * Options for fmTableOccurrence function. + * Provides autocomplete-friendly typing while preserving inference for navigationPaths. + */ +export interface FMTableOccurrenceOptions< + TFields extends Record>, + TName extends string, +> { + /** The entity ID (FMTID) for this table occurrence */ + entityId?: `FMTID:${string}`; + + /** + * Default select behavior: + * - "all": Select all fields (including related tables) + * - "schema": Select only schema-defined fields (default) + * - function: Custom selection from columns + */ + defaultSelect?: + | "all" + | "schema" + | (( + columns: ColumnMap, + ) => Record>); + + /** Navigation paths available from this table (for expand operations) */ + navigationPaths?: readonly string[]; + + /** Whether to use entity IDs (FMTID/FMFID) instead of names in queries */ + useEntityIds?: boolean; +} + +/** + * Create a table occurrence with field builders. + * This is the main API for defining tables in the new ORM style. + * + * @example + * const users = fmTableOccurrence("users", { + * id: textField().primaryKey().entityId("FMFID:1"), + * name: textField().notNull().entityId("FMFID:6"), + * active: numberField() + * .outputValidator(z.coerce.boolean()) + * .inputValidator(z.boolean().transform(v => v ? 
1 : 0)) + * .entityId("FMFID:7"), + * }, { + * entityId: "FMTID:100", + * defaultSelect: "schema", + * navigationPaths: ["contacts"], + * }); + * + * // Access columns + * users.id // Column + * users.name // Column + * + * // Use in queries + * db.from(users).select("id", "name").where(eq(users.active, true)) + */ +export function fmTableOccurrence< + const TName extends string, + const TFields extends Record>, + const TNavPaths extends readonly string[] = readonly [], +>( + name: TName, + fields: TFields, + options?: FMTableOccurrenceOptions & { + /** Navigation paths available from this table (for expand operations) */ + navigationPaths?: TNavPaths; + }, +): FMTableWithColumns { + // Extract configuration from field builders + const fieldConfigs = Object.entries(fields).map(([fieldName, builder]) => ({ + fieldName, + config: (builder as any)._getConfig(), + })); + + // Find primary key field + const primaryKeyField = fieldConfigs.find((f) => f.config.primaryKey); + const idField = primaryKeyField?.fieldName; + + // Collect required fields (notNull fields) + const required = fieldConfigs + .filter((f) => f.config.notNull) + .map((f) => f.fieldName); + + // Collect read-only fields + const readOnly = fieldConfigs + .filter((f) => f.config.readOnly) + .map((f) => f.fieldName); + + // Collect container fields (cannot be selected via $select) + const containerFields = fieldConfigs + .filter((f) => f.config.fieldType === "container") + .map((f) => f.fieldName); + + // Collect entity IDs + const fmfIds: Record = {}; + for (const { fieldName, config } of fieldConfigs) { + if (config.entityId) { + fmfIds[fieldName] = config.entityId; + } + } + + // Build Zod schema from field builders (output/read validators) + const zodSchema: Record = {}; + // Build input schema from field builders (input/write validators) + const inputSchema: Record = {}; + + for (const { fieldName, config } of fieldConfigs) { + // Use outputValidator if provided, otherwise create a basic validator + 
if (config.outputValidator) { + zodSchema[fieldName] = config.outputValidator; + } else { + // Create a default validator based on field type and nullability + let validator: any; + switch (config.fieldType) { + case "text": + case "date": + case "time": + case "timestamp": + case "container": + case "calculated": + validator = z.string(); + break; + case "number": + validator = z.number(); + break; + default: + validator = z.unknown(); + } + + // Add nullability if not marked as notNull + if (!config.notNull) { + validator = validator.nullable(); + } + + zodSchema[fieldName] = validator; + } + + // Store inputValidator if provided (for write operations) + if (config.inputValidator) { + inputSchema[fieldName] = config.inputValidator; + } + } + + // Create a schema validator for the entire table + const tableSchema = z.object(zodSchema) as unknown as StandardSchemaV1< + any, + InferSchemaFromFields + >; + + // Build BaseTable-compatible config + const baseTableConfig = { + schema: zodSchema as Record, + inputSchema: (Object.keys(inputSchema).length > 0 + ? inputSchema + : undefined) as Record | undefined, + idField: idField as keyof TFields | undefined, + required: required as readonly (keyof TFields)[], + readOnly: readOnly as readonly (keyof TFields)[], + containerFields: containerFields as readonly (keyof TFields)[], + fmfIds: (Object.keys(fmfIds).length > 0 ? fmfIds : undefined) as + | Record + | undefined, + }; + + // Create column instances + const columns = {} as ColumnMap; + for (const [fieldName, builder] of Object.entries(fields)) { + const config = (builder as any)._getConfig(); + (columns as any)[fieldName] = new Column({ + fieldName: String(fieldName), + entityId: config.entityId, + tableName: name, + tableEntityId: options?.entityId, + inputValidator: config.inputValidator, + }); + } + + // Resolve defaultSelect: if it's a function, call it with columns; otherwise use as-is + const defaultSelectOption = options?.defaultSelect ?? 
"schema"; + const resolvedDefaultSelect: + | "all" + | "schema" + | Record> = + typeof defaultSelectOption === "function" + ? defaultSelectOption(columns as ColumnMap) + : defaultSelectOption; + + // Create the FMTable instance with Symbol-based internal properties + const navigationPaths = (options?.navigationPaths ?? []) as TNavPaths; + const table = new FMTable({ + name, + entityId: options?.entityId, + useEntityIds: options?.useEntityIds, + schema: tableSchema, + fields, + navigationPaths, + defaultSelect: resolvedDefaultSelect, + baseTableConfig, + }); + + // Assign columns to the table instance (making them accessible directly) + Object.assign(table, columns); + + return table as FMTableWithColumns; +} + +// /** +// * Type guard to check if a value is a TableOccurrence or FMTable. +// * Supports both Symbol-based (new) and underscore-prefixed (legacy) formats. +// */ +// function isTableOccurrence(value: any): value is TableOccurrence { +// if (!value || typeof value !== "object") { +// return false; +// } + +// // Check for Symbol-based format (new FMTable class) +// if ( +// FMTableName in value && +// FMTableSchema in value && +// FMTableFields in value +// ) { +// return typeof value[FMTableName] === "string"; +// } + +// // Check for underscore-prefixed format (legacy interface) +// if ("_name" in value && "_schema" in value && "_fields" in value) { +// return typeof value._name === "string"; +// } + +// return false; +// } + +/** + * Helper to extract the schema type from a TableOccurrence or FMTable. + */ +export type InferTableSchema = + T extends FMTable + ? InferSchemaFromFields + : never; + +/** + * Extract the schema type from an FMTable instance. + * This is used to infer the schema from table objects passed to db.from(), expand(), etc. + */ +export type InferSchemaOutputFromFMTable> = + T extends FMTable + ? InferSchemaFromFields + : never; + +/** + * Extract the input schema type from an FMTable instance. 
+ * This is used for insert and update operations where we need write types. + */ +export type InferInputSchemaFromFMTable> = + T extends FMTable + ? InferInputSchemaFromFields + : never; + +/** + * Helper type to check if a FieldBuilder's input type excludes null and undefined. + * This checks the TInput type parameter, which preserves nullability from notNull(). + */ +type FieldInputExcludesNullish = + F extends FieldBuilder + ? null extends TInput + ? false + : undefined extends TInput + ? false + : true + : false; + +/** + * Check if a FieldBuilder is readOnly at the type level + */ +type IsFieldReadOnly = + F extends FieldBuilderType + ? ReadOnly extends true + ? true + : false + : false; + +/** + * Compute insert data type from FMTable, making notNull fields required. + * Fields are required if their FieldBuilder's TInput type excludes null/undefined. + * All other fields are optional (can be omitted). + * readOnly fields are excluded (including primaryKey/idField since they're automatically readOnly). + */ +export type InsertDataFromFMTable> = + T extends FMTable + ? { + [K in keyof TFields as IsFieldReadOnly extends true + ? never + : FieldInputExcludesNullish extends true + ? K + : never]: InferFieldInput; + } & { + [K in keyof TFields as IsFieldReadOnly extends true + ? never + : FieldInputExcludesNullish extends true + ? never + : K]?: InferFieldInput; + } + : never; + +/** + * Compute update data type from FMTable. + * All fields are optional, but readOnly fields are excluded (including primaryKey/idField). + */ +export type UpdateDataFromFMTable> = + T extends FMTable + ? { + [K in keyof TFields as IsFieldReadOnly extends true + ? never + : K]?: InferFieldInput; + } + : never; + +/** + * Extract the table name type from an FMTable. + * This is a workaround since we can't directly index Symbols in types. + */ +export type ExtractTableName> = + T extends FMTable ? 
Name : never; + +/** + * Validates that a target table's name matches one of the source table's navigationPaths. + * Used to ensure type-safe expand/navigate operations. + */ +export type ValidExpandTarget< + SourceTable extends FMTable | undefined, + TargetTable extends FMTable, +> = + SourceTable extends FMTable + ? ExtractTableName extends SourceNavPaths[number] + ? TargetTable + : never + : TargetTable; + +// ============================================================================ +// Helper Functions for Accessing FMTable Internal Properties +// ============================================================================ + +/** + * Get the table name from an FMTable instance. + * @param table - FMTable instance + * @returns The table name + */ +export function getTableName>(table: T): string { + return table[FMTableName]; +} + +/** + * Get the entity ID (FMTID) from an FMTable instance. + * @param table - FMTable instance + * @returns The entity ID or undefined if not using entity IDs + */ +export function getTableEntityId>( + table: T, +): string | undefined { + return table[FMTableEntityId]; +} + +/** + * Get the schema validator from an FMTable instance. + * @param table - FMTable instance + * @returns The StandardSchemaV1 validator + */ +export function getTableSchema>( + table: T, +): StandardSchemaV1 { + return table[FMTableSchema]; +} + +/** + * Get the fields from an FMTable instance. + * @param table - FMTable instance + * @returns The fields record + */ +export function getTableFields>(table: T) { + return table[FMTableFields]; +} + +/** + * Get the navigation paths from an FMTable instance. + * @param table - FMTable instance + * @returns Array of navigation path names + */ +export function getNavigationPaths>( + table: T, +): readonly string[] { + return table[FMTableNavigationPaths]; +} + +/** + * Get the default select configuration from an FMTable instance. 
+ * @param table - FMTable instance + * @returns Default select configuration + */ +export function getDefaultSelect>(table: T) { + return table[FMTableDefaultSelect]; +} + +/** + * Get the base table configuration from an FMTable instance. + * This provides access to schema, idField, required fields, readOnly fields, and field IDs. + * @param table - FMTable instance + * @returns Base table configuration object + */ +export function getBaseTableConfig>(table: T) { + return table[FMTableBaseTableConfig]; +} + +/** + * Check if an FMTable instance is using entity IDs (both FMTID and FMFIDs). + * @param table - FMTable instance + * @returns True if using entity IDs, false otherwise + */ +export function isUsingEntityIds>( + table: T, +): boolean { + return ( + table[FMTableEntityId] !== undefined && + table[FMTableBaseTableConfig].fmfIds !== undefined + ); +} + +/** + * Get the field ID (FMFID) for a given field name, or the field name itself if not using IDs. + * @param table - FMTable instance + * @param fieldName - Field name to get the ID for + * @returns The FMFID string or the original field name + */ +export function getFieldId>( + table: T, + fieldName: string, +): string { + const config = table[FMTableBaseTableConfig]; + if (config.fmfIds && fieldName in config.fmfIds) { + const fieldId = config.fmfIds[fieldName]; + if (fieldId) { + return fieldId; + } + } + return fieldName; +} + +/** + * Get the field name for a given field ID (FMFID), or the ID itself if not found. 
+ * @param table - FMTable instance + * @param fieldId - The FMFID to get the field name for + * @returns The field name or the original ID + */ +export function getFieldName>( + table: T, + fieldId: string, +): string { + const config = table[FMTableBaseTableConfig]; + if (config.fmfIds) { + for (const [fieldName, fmfId] of Object.entries(config.fmfIds)) { + if (fmfId === fieldId) { + return fieldName; + } + } + } + return fieldId; +} +/** + * Get the table ID (FMTID or name) from an FMTable instance. + * Returns the FMTID if available, otherwise returns the table name. + * @param table - FMTable instance + * @returns The FMTID string or the table name + */ +export function getTableId>(table: T): string { + return table[FMTableEntityId] ?? table[FMTableName]; +} + +/** + * Get all columns from a table as an object. + * Useful for selecting all fields except some using destructuring. + * + * @example + * const { password, ...cols } = getTableColumns(users) + * db.from(users).list().select(cols) + * + * @param table - FMTable instance + * @returns Object with all columns from the table + */ +export function getTableColumns>( + table: T, +): ColumnMap> { + const fields = table[FMTableFields]; + const tableName = table[FMTableName]; + const tableEntityId = table[FMTableEntityId]; + const baseConfig = table[FMTableBaseTableConfig]; + + const columns = {} as ColumnMap>; + for (const [fieldName, builder] of Object.entries(fields)) { + const config = (builder as any)._getConfig(); + (columns as any)[fieldName] = new Column({ + fieldName: String(fieldName), + entityId: baseConfig.fmfIds?.[fieldName], + tableName: tableName, + tableEntityId: tableEntityId, + inputValidator: config.inputValidator, + }); + } + + return columns; +} diff --git a/packages/fmodata/src/transform.ts b/packages/fmodata/src/transform.ts index 30999bc4..64e8db7e 100644 --- a/packages/fmodata/src/transform.ts +++ b/packages/fmodata/src/transform.ts @@ -1,24 +1,32 @@ -import type { BaseTable } from 
"./client/base-table"; -import type { TableOccurrence } from "./client/table-occurrence"; +import type { FMTable } from "./orm/table"; +import { + getBaseTableConfig, + getFieldId, + getFieldName, + getTableId, + getTableName, + isUsingEntityIds, +} from "./orm/table"; import type { StandardSchemaV1 } from "@standard-schema/spec"; /** * Transforms field names to FileMaker field IDs (FMFID) in an object * @param data - Object with field names as keys - * @param baseTable - BaseTable instance to get field IDs from + * @param table - FMTable instance to get field IDs from * @returns Object with FMFID keys instead of field names */ export function transformFieldNamesToIds>( data: T, - baseTable: BaseTable, + table: FMTable, ): Record { - if (!baseTable.isUsingFieldIds()) { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { return data; } const transformed: Record = {}; for (const [fieldName, value] of Object.entries(data)) { - const fieldId = baseTable.getFieldId(fieldName as any); + const fieldId = getFieldId(table, fieldName); transformed[fieldId] = value; } return transformed; @@ -27,14 +35,15 @@ export function transformFieldNamesToIds>( /** * Transforms FileMaker field IDs (FMFID) to field names in an object * @param data - Object with FMFID keys - * @param baseTable - BaseTable instance to get field names from + * @param table - FMTable instance to get field names from * @returns Object with field names as keys instead of FMFIDs */ export function transformFieldIdsToNames>( data: T, - baseTable: BaseTable, + table: FMTable, ): Record { - if (!baseTable.isUsingFieldIds()) { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { return data; } @@ -46,7 +55,7 @@ export function transformFieldIdsToNames>( continue; } - const fieldName = baseTable.getFieldName(key); + const fieldName = getFieldName(table, key); transformed[fieldName] = value; } return transformed; @@ -55,38 +64,36 @@ export function transformFieldIdsToNames>( /** * 
Transforms a field name to FMFID or returns the field name if not using IDs * @param fieldName - The field name to transform - * @param baseTable - BaseTable instance to get field ID from + * @param table - FMTable instance to get field ID from * @returns The FMFID or field name */ export function transformFieldName( fieldName: string, - baseTable: BaseTable, + table: FMTable, ): string { - return baseTable.getFieldId(fieldName as any); + return getFieldId(table, fieldName); } /** - * Transforms a table occurrence name to FMTID or returns the name if not using IDs - * @param occurrence - TableOccurrence instance to get table ID from + * Transforms a table name to FMTID or returns the name if not using IDs + * @param table - FMTable instance to get table ID from * @returns The FMTID or table name */ -export function transformTableName( - occurrence: TableOccurrence, -): string { - return occurrence.getTableId(); +export function transformTableName(table: FMTable): string { + return getTableId(table); } /** - * Gets both table name and ID from an occurrence - * @param occurrence - TableOccurrence instance + * Gets both table name and ID from a table + * @param table - FMTable instance * @returns Object with name (always present) and id (may be undefined if not using IDs) */ export function getTableIdentifiers( - occurrence: TableOccurrence, + table: FMTable, ): { name: string; id: string | undefined } { return { - name: occurrence.getTableName(), - id: occurrence.isUsingTableId() ? occurrence.getTableId() : undefined, + name: getTableName(table), + id: isUsingEntityIds(table) ? getTableId(table) : undefined, }; } @@ -95,19 +102,20 @@ export function getTableIdentifiers( * Handles both single records and arrays of records, as well as nested expand relationships. 
* * @param data - Response data from FileMaker (can be single record, array, or wrapped in value property) - * @param baseTable - BaseTable instance for the main table + * @param table - FMTable instance for the main table * @param expandConfigs - Configuration for expanded relations (optional) * @returns Transformed data with field names instead of IDs */ export function transformResponseFields( data: any, - baseTable: BaseTable, + table: FMTable, expandConfigs?: Array<{ relation: string; - occurrence?: TableOccurrence; + table?: FMTable; }>, ): any { - if (!baseTable.isUsingFieldIds()) { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { return data; } @@ -121,7 +129,7 @@ export function transformResponseFields( return { ...data, value: data.value.map((record: any) => - transformSingleRecord(record, baseTable, expandConfigs), + transformSingleRecord(record, table, expandConfigs), ), }; } @@ -129,12 +137,12 @@ export function transformResponseFields( // Handle array of records if (Array.isArray(data)) { return data.map((record) => - transformSingleRecord(record, baseTable, expandConfigs), + transformSingleRecord(record, table, expandConfigs), ); } // Handle single record - return transformSingleRecord(data, baseTable, expandConfigs); + return transformSingleRecord(data, table, expandConfigs); } /** @@ -142,10 +150,10 @@ export function transformResponseFields( */ function transformSingleRecord( record: any, - baseTable: BaseTable, + table: FMTable, expandConfigs?: Array<{ relation: string; - occurrence?: TableOccurrence; + table?: FMTable; }>, ): any { if (!record || typeof record !== "object") { @@ -169,13 +177,11 @@ function transformSingleRecord( if (!expandConfig && key.startsWith("FMTID:")) { expandConfig = expandConfigs?.find( (ec) => - ec.occurrence && - ec.occurrence.isUsingTableId() && - ec.occurrence.getTableId() === key, + ec.table && isUsingEntityIds(ec.table) && getTableId(ec.table) === key, ); } - if (expandConfig && 
expandConfig.occurrence) { + if (expandConfig && expandConfig.table) { // Transform the expanded relation data recursively // Use the relation name (not the FMTID) as the key const relationKey = expandConfig.relation; @@ -184,14 +190,14 @@ function transformSingleRecord( transformed[relationKey] = value.map((nestedRecord) => transformSingleRecord( nestedRecord, - expandConfig.occurrence!.baseTable, + expandConfig.table!, undefined, // Don't pass nested expand configs for now ), ); } else if (value && typeof value === "object") { transformed[relationKey] = transformSingleRecord( value, - expandConfig.occurrence.baseTable, + expandConfig.table, undefined, ); } else { @@ -201,7 +207,7 @@ function transformSingleRecord( } // Transform field ID to field name - const fieldName = baseTable.getFieldName(key); + const fieldName = getFieldName(table, key); transformed[fieldName] = value; } @@ -211,39 +217,47 @@ function transformSingleRecord( /** * Transforms an array of field names to FMFIDs * @param fieldNames - Array of field names - * @param baseTable - BaseTable instance to get field IDs from + * @param table - FMTable instance to get field IDs from * @returns Array of FMFIDs or field names */ export function transformFieldNamesArray( fieldNames: string[], - baseTable: BaseTable, + table: FMTable, ): string[] { - if (!baseTable.isUsingFieldIds()) { + const config = getBaseTableConfig(table); + if (!config.fmfIds) { return fieldNames; } - return fieldNames.map((fieldName) => baseTable.getFieldId(fieldName as any)); + return fieldNames.map((fieldName) => getFieldId(table, fieldName)); } /** * Transforms a field name in an orderBy string (e.g., "name desc" -> "FMFID:1 desc") * @param orderByString - The orderBy string (field name with optional asc/desc) - * @param baseTable - BaseTable instance to get field ID from + * @param table - FMTable instance to get field ID from * @returns Transformed orderBy string with FMFID */ export function transformOrderByField( 
orderByString: string, - baseTable: BaseTable, + table: FMTable | undefined, ): string { - if (!baseTable.isUsingFieldIds()) { + if (!table) { + return orderByString; + } + const config = getBaseTableConfig(table); + if (!config || !config.fmfIds) { return orderByString; } // Parse the orderBy string to extract field name and direction const parts = orderByString.trim().split(/\s+/); const fieldName = parts[0]; + if (!fieldName) { + return orderByString; + } const direction = parts[1]; // "asc" or "desc" or undefined - const fieldId = baseTable.getFieldId(fieldName as any); + const fieldId = getFieldId(table, fieldName); return direction ? `${fieldId} ${direction}` : fieldId; } diff --git a/packages/fmodata/src/types.ts b/packages/fmodata/src/types.ts index 79497caf..4c310d0d 100644 --- a/packages/fmodata/src/types.ts +++ b/packages/fmodata/src/types.ts @@ -1,5 +1,6 @@ import { type FFetchOptions } from "@fetchkit/ffetch"; import type { StandardSchemaV1 } from "@standard-schema/spec"; +import type { InternalLogger } from "./logger"; export type Auth = { username: string; password: string } | { apiKey: string }; @@ -36,6 +37,7 @@ export interface ExecutionContext { _setUseEntityIds?(useEntityIds: boolean): void; _getUseEntityIds?(): boolean; _getBaseUrl?(): string; + _getLogger?(): InternalLogger; } export type InferSchemaType> = { @@ -149,69 +151,6 @@ type ComputeInsertData< Exclude, ExcludedFields> >; -// Extract insert data type from BaseTable -// Auto-infers required fields from validator nullability + user-specified required fields -// Excludes readOnly fields and idField -export type InsertData = BT extends import("./client/base-table").BaseTable< - any, - any, - any, - any -> - ? BT extends { - schema: infer Schema; - idField?: infer IdField; - required?: infer Required; - readOnly?: infer ReadOnly; - } - ? Schema extends Record - ? IdField extends keyof Schema | undefined - ? Required extends readonly any[] - ? ReadOnly extends readonly any[] - ? 
ComputeInsertData< - Schema, - Extract, - Required, - ReadOnly - > - : Partial> - : Partial> - : Partial> - : Partial> - : Partial> - : Partial>; - -// Extract update data type from BaseTable -// All fields are optional for updates, excludes readOnly fields and idField -export type UpdateData = BT extends import("./client/base-table").BaseTable< - any, - any, - any, - any -> - ? BT extends { - schema: infer Schema; - idField?: infer IdField; - readOnly?: infer ReadOnly; - } - ? Schema extends Record - ? IdField extends keyof Schema | undefined - ? ReadOnly extends readonly any[] - ? Partial< - Omit< - InferSchemaType, - ExcludedFields< - Extract, - ReadOnly - > - > - > - : Partial> - : Partial> - : Partial> - : Partial> - : Partial>; - export type ExecuteOptions = { includeODataAnnotations?: boolean; skipValidation?: boolean; @@ -221,6 +160,38 @@ export type ExecuteOptions = { useEntityIds?: boolean; }; +/** + * Type for the fetchHandler callback function. + * This is a convenience type export that matches the fetchHandler signature in FFetchOptions. + * + * @example + * ```typescript + * import type { FetchHandler } from '@proofkit/fmodata'; + * + * const myFetchHandler: FetchHandler = (input, init) => { + * console.log('Custom fetch:', input); + * return fetch(input, init); + * }; + * + * await query.execute({ + * fetchHandler: myFetchHandler + * }); + * ``` + */ +export type FetchHandler = ( + input: RequestInfo | URL, + init?: RequestInit, +) => Promise; + +/** + * Combined type for execute() method options. + * + * Uses FFetchOptions from @fetchkit/ffetch to ensure proper type inference. + * FFetchOptions is re-exported in the package to ensure type availability in consuming packages. 
+ */ +export type ExecuteMethodOptions = + RequestInit & FFetchOptions & ExecuteOptions & EO; + /** * Get the Accept header value based on includeODataAnnotations option * @param includeODataAnnotations - Whether to include OData annotations @@ -242,7 +213,7 @@ export type ConditionallyWithODataAnnotations< } : T; -// Helper type to extract schema from a TableOccurrence +// Helper type to extract schema from a FMTable export type ExtractSchemaFromOccurrence = Occ extends { baseTable: { schema: infer S }; } diff --git a/packages/fmodata/src/validation.ts b/packages/fmodata/src/validation.ts index fe181858..116ba375 100644 --- a/packages/fmodata/src/validation.ts +++ b/packages/fmodata/src/validation.ts @@ -1,19 +1,91 @@ import type { ODataRecordMetadata } from "./types"; import { StandardSchemaV1 } from "@standard-schema/spec"; -import type { TableOccurrence } from "./client/table-occurrence"; +import type { FMTable } from "./orm/table"; import { ValidationError, ResponseStructureError, RecordCountMismatchError, } from "./errors"; +/** + * Validates and transforms input data for insert/update operations. + * Applies input validators (writeValidators) to transform user input to database format. + * Fields without input validators are passed through unchanged. 
+ * + * @param data - The input data to validate and transform + * @param inputSchema - Optional schema containing input validators for each field + * @returns Transformed data ready to send to the server + * @throws ValidationError if any field fails validation + */ +export async function validateAndTransformInput>( + data: Partial, + inputSchema?: Record, +): Promise> { + // If no input schema, return data as-is + if (!inputSchema) { + return data; + } + + const transformedData: Record = { ...data }; + + // Process each field that has an input validator + for (const [fieldName, fieldSchema] of Object.entries(inputSchema)) { + // Only process fields that are present in the input data + if (fieldName in data) { + const inputValue = data[fieldName]; + + try { + // Run the input validator to transform the value + let result = fieldSchema["~standard"].validate(inputValue); + if (result instanceof Promise) { + result = await result; + } + + // Check for validation errors + if (result.issues) { + throw new ValidationError( + `Input validation failed for field '${fieldName}'`, + result.issues, + { + field: fieldName, + value: inputValue, + cause: result.issues, + }, + ); + } + + // Store the transformed value + transformedData[fieldName] = result.value; + } catch (error) { + // If it's already a ValidationError, re-throw it + if (error instanceof ValidationError) { + throw error; + } + + // Otherwise, wrap the error + throw new ValidationError( + `Input validation failed for field '${fieldName}'`, + [], + { + field: fieldName, + value: inputValue, + cause: error, + }, + ); + } + } + } + + // Fields without input validators are already in transformedData (passed through) + return transformedData as Partial; +} + // Type for expand validation configuration export type ExpandValidationConfig = { relation: string; targetSchema?: Record; - targetOccurrence?: TableOccurrence; - targetBaseTable?: any; // BaseTable instance for transformation - occurrence?: TableOccurrence; // 
For transformation + targetTable?: FMTable; + table?: FMTable; // For transformation selectedFields?: string[]; nestedExpands?: ExpandValidationConfig[]; }; diff --git a/packages/fmodata/tests/batch-error-messages.test.ts b/packages/fmodata/tests/batch-error-messages.test.ts index 57c7a25a..32734e7e 100644 --- a/packages/fmodata/tests/batch-error-messages.test.ts +++ b/packages/fmodata/tests/batch-error-messages.test.ts @@ -11,12 +11,11 @@ import { describe, it, expect } from "vitest"; import { z } from "zod/v4"; import { - defineBaseTable, - defineTableOccurrence, - buildOccurrences, + fmTableOccurrence, + textField, isODataError, isResponseStructureError, -} from "../src/index"; +} from "@proofkit/fmodata"; import { createMockClient } from "./utils/test-setup"; /** @@ -50,27 +49,12 @@ describe("Batch Error Messages - Improved Error Parsing", () => { const client = createMockClient(); // Define simple schemas for batch testing - const addressesBase = defineBaseTable({ - schema: { - id: z.string(), - street: z.string().nullable(), - }, - idField: "id", + const addressesTO = fmTableOccurrence("addresses", { + id: textField().primaryKey(), + street: textField(), }); - const _addressesTO = defineTableOccurrence({ - name: "addresses", - baseTable: addressesBase, - }); - - const [addressesTO] = buildOccurrences({ - occurrences: [_addressesTO], - navigation: {}, - }); - - const db = client.database("test_db", { - occurrences: [addressesTO], - }); + const db = client.database("test_db"); it("should return ODataError with helpful message instead of vague ResponseStructureError", async () => { // This simulates the exact scenario from the user's error: @@ -118,9 +102,9 @@ describe("Batch Error Messages - Improved Error Parsing", () => { ].join("\r\n"); // Create three queries (simulating user's punchlistQuery, purchaseOrdersQuery, ticketsQuery) - const query1 = db.from("addresses").list(); - const query2 = db.from("addresses").list(); // Will fail with 404 in mock - const 
query3 = db.from("addresses").list(); + const query1 = db.from(addressesTO).list(); + const query2 = db.from(addressesTO).list(); // Will fail with 404 in mock + const query3 = db.from(addressesTO).list(); // Execute batch with mock const result = await db.batch([query1, query2, query3]).execute({ @@ -184,7 +168,7 @@ describe("Batch Error Messages - Improved Error Parsing", () => { "--batch_boundary--", ].join("\r\n"); - const badQuery = db.from("addresses").list(); + const badQuery = db.from(addressesTO).list(); const result = await db.batch([badQuery]).execute({ fetchHandler: createBatchMockFetch(mockBatchResponse), @@ -208,4 +192,3 @@ describe("Batch Error Messages - Improved Error Parsing", () => { } }); }); - diff --git a/packages/fmodata/tests/batch.test.ts b/packages/fmodata/tests/batch.test.ts index 89540340..486dcf75 100644 --- a/packages/fmodata/tests/batch.test.ts +++ b/packages/fmodata/tests/batch.test.ts @@ -8,14 +8,15 @@ import { describe, it, expect } from "vitest"; import { z } from "zod/v4"; import { - defineBaseTable, - defineTableOccurrence, - buildOccurrences, + fmTableOccurrence, + textField, BatchTruncatedError, isBatchTruncatedError, isODataError, ODataError, -} from "../src/index"; + eq, + isNotNull, +} from "@proofkit/fmodata"; import { createMockClient } from "./utils/test-setup"; /** @@ -49,41 +50,18 @@ describe("Batch Operations - Mock Tests", () => { const client = createMockClient(); // Define simple schemas for batch testing - const contactsBase = defineBaseTable({ - schema: { - PrimaryKey: z.string(), - name: z.string().nullable(), - hobby: z.string().nullable(), - }, - idField: "PrimaryKey", + const contactsTO = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + name: textField(), + hobby: textField(), }); - const usersBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string().nullable(), - }, - idField: "id", + const usersTO = fmTableOccurrence("users", { + id: textField().primaryKey(), + name: 
textField(), }); - const _contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - }); - - const _usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - }); - - const [contactsTO, usersTO] = buildOccurrences({ - occurrences: [_contactsTO, _usersTO], - navigation: {}, - }); - - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); describe("Mixed success/failure responses", () => { it("should handle batch response where first succeeds, second fails (404), and third is truncated", async () => { @@ -127,12 +105,18 @@ describe("Batch Operations - Mock Tests", () => { ].join("\r\n"); // Create three queries - const query1 = db.from("contacts").list().filter({ hobby: "Testing" }); - const query2 = db.from("users").list().filter({ name: "NonExistent" }); + const query1 = db + .from(contactsTO) + .list() + .where(eq(contactsTO.hobby, "Testing")); + const query2 = db + .from(usersTO) + .list() + .where(eq(usersTO.name, "NonExistent")); const query3 = db - .from("contacts") + .from(contactsTO) .list() - .filter({ name: { ne: null } }); + .where(isNotNull(contactsTO.name)); // Execute batch with mock const result = await db.batch([query1, query2, query3]).execute({ @@ -226,9 +210,15 @@ describe("Batch Operations - Mock Tests", () => { "--b_success_boundary--", ].join("\r\n"); - const query1 = db.from("contacts").list().filter({ hobby: "Reading" }); - const query2 = db.from("users").list().top(1); - const query3 = db.from("contacts").list().filter({ hobby: "Gaming" }); + const query1 = db + .from(contactsTO) + .list() + .where(eq(contactsTO.hobby, "Reading")); + const query2 = db.from(usersTO).list().top(1); + const query3 = db + .from(contactsTO) + .list() + .where(eq(contactsTO.hobby, "Gaming")); const result = await db.batch([query1, query2, query3]).execute({ fetchHandler: createBatchMockFetch(mockBatchResponse), @@ -303,9 +293,12 @@ describe("Batch 
Operations - Mock Tests", () => { "--b_empty_boundary--", ].join("\r\n"); - const query1 = db.from("contacts").list().top(1); - const query2 = db.from("users").list().filter({ name: "NonExistent" }); - const query3 = db.from("contacts").list().top(1); + const query1 = db.from(contactsTO).list().top(1); + const query2 = db + .from(usersTO) + .list() + .where(eq(usersTO.name, "NonExistent")); + const query3 = db.from(contactsTO).list().top(1); const result = await db.batch([query1, query2, query3]).execute({ fetchHandler: createBatchMockFetch(mockBatchResponse), diff --git a/packages/fmodata/tests/delete.test.ts b/packages/fmodata/tests/delete.test.ts index 46a4f673..6c236bc1 100644 --- a/packages/fmodata/tests/delete.test.ts +++ b/packages/fmodata/tests/delete.test.ts @@ -7,82 +7,71 @@ import { describe, it, expect, expectTypeOf, vi } from "vitest"; import { z } from "zod/v4"; -import { defineBaseTable, defineTableOccurrence } from "../src/index"; -import { InferSchemaType } from "../src/types"; -import { DeleteBuilder } from "../src/client/delete-builder"; -import { ExecutableDeleteBuilder } from "../src/client/delete-builder"; +import { + fmTableOccurrence, + textField, + numberField, + type InferTableSchema, + eq, + and, + lt, +} from "@proofkit/fmodata"; +import { DeleteBuilder } from "@proofkit/fmodata/client/delete-builder"; +import { ExecutableDeleteBuilder } from "@proofkit/fmodata/client/delete-builder"; import { simpleMock } from "./utils/mock-fetch"; import { createMockClient } from "./utils/test-setup"; describe("delete method", () => { const client = createMockClient(); - const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), - active: z.boolean(), - lastLogin: z.string().optional(), - }, - idField: "id", + const usersTO = fmTableOccurrence("users", { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: 
numberField().readValidator(z.coerce.boolean()).notNull(), + lastLogin: textField(), }); - const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - }); - - type UserSchema = InferSchemaType; + type UserSchema = InferTableSchema; describe("builder pattern", () => { it("should return DeleteBuilder when delete() is called", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); - const result = db.from("users").delete(); + const result = db.from(usersTO).delete(); expect(result).toBeInstanceOf(DeleteBuilder); }); it("should not have execute() on initial DeleteBuilder", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); - const deleteBuilder = db.from("users").delete(); + const deleteBuilder = db.from(usersTO).delete(); // Type check: execute should not exist on DeleteBuilder expectTypeOf(deleteBuilder).not.toHaveProperty("execute"); }); it("should return ExecutableDeleteBuilder after byId()", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); - const result = db.from("users").delete().byId("user-123"); + const result = db.from(usersTO).delete().byId("user-123"); expect(result).toBeInstanceOf(ExecutableDeleteBuilder); }); it("should return ExecutableDeleteBuilder after where()", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = db - .from("users") + .from(usersTO) .delete() - .where((q) => q.filter({ active: false })); + .where((q) => q.where(eq(usersTO.active, 0))); expect(result).toBeInstanceOf(ExecutableDeleteBuilder); }); it("should have execute() on ExecutableDeleteBuilder", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); - const executableBuilder = 
db.from("users").delete().byId("user-123"); + const executableBuilder = db.from(usersTO).delete().byId("user-123"); // Type check: execute should exist expectTypeOf(executableBuilder).toHaveProperty("execute"); @@ -91,11 +80,9 @@ describe("delete method", () => { describe("delete by ID", () => { it("should generate correct URL for delete by ID", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); - const deleteBuilder = db.from("users").delete().byId("user-123"); + const deleteBuilder = db.from(usersTO).delete().byId("user-123"); const config = deleteBuilder.getRequestConfig(); expect(config.method).toBe("DELETE"); @@ -103,11 +90,9 @@ describe("delete method", () => { }); it("should return deletedCount result type", async () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); - db.from("users").delete().byId("user-123"); + db.from(usersTO).delete().byId("user-123"); }); it("should execute delete by ID and return count", async () => { @@ -118,12 +103,10 @@ describe("delete method", () => { body: null, }); - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = await db - .from("users") + .from(usersTO) .delete() .byId("user-123") .execute({ fetchHandler: mockFetch }); @@ -135,14 +118,12 @@ describe("delete method", () => { describe("delete by filter", () => { it("should generate correct URL for delete by filter", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const deleteBuilder = db - .from("users") + .from(usersTO) .delete() - .where((q) => q.filter({ active: false })); + .where((q) => q.where(eq(usersTO.active, 0))); const config = deleteBuilder.getRequestConfig(); @@ -153,17 +134,15 @@ describe("delete method", () => { }); it("should support complex filters with QueryBuilder", 
() => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const deleteBuilder = db - .from("users") + .from(usersTO) .delete() .where((q) => - q.filter({ - and: [{ active: false }, { lastLogin: { lt: "2023-01-01" } }], - }), + q.where( + and(eq(usersTO.active, 0), lt(usersTO.lastLogin, "2023-01-01")), + ), ); const config = deleteBuilder.getRequestConfig(); @@ -173,14 +152,12 @@ describe("delete method", () => { }); it("should support QueryBuilder chaining in where callback", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const deleteBuilder = db - .from("users") + .from(usersTO) .delete() - .where((q) => q.filter({ active: false }).top(10)); + .where((q) => q.where(eq(usersTO.active, 0)).top(10)); const config = deleteBuilder.getRequestConfig(); @@ -190,14 +167,12 @@ describe("delete method", () => { }); it("should return deletedCount result type for filter-based delete", async () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); - db.from("users"); + const db = client.database("test_db"); + db.from(usersTO); - db.from("users") + db.from(usersTO) .delete() - .where((q) => q.filter({ active: false })); + .where((q) => q.where(eq(usersTO.active, 0))); }); it("should execute delete by filter and return count", async () => { @@ -208,14 +183,12 @@ describe("delete method", () => { body: null, }); - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = await db - .from("users") + .from(usersTO) .delete() - .where((q) => q.filter({ active: false })) + .where((q) => q.where(eq(usersTO.active, 0))) .execute({ fetchHandler: mockFetch }); expect(result.error).toBeUndefined(); @@ -225,39 +198,27 @@ describe("delete method", () => { describe("type safety", () => { it("should enforce type-safe filter properties", () => { - const db = 
client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); // This should work - valid property - db.from("users") + db.from(usersTO) .delete() - .where((q) => q.filter({ active: false })); - - // Type check: TypeScript should allow valid field names - expectTypeOf( - db - .from("users") - .delete() - .where((q) => q.filter({ active: false })), - ).toEqualTypeOf>(); + .where((q) => q.where(eq(usersTO.active, 0))); }); it("should provide type-safe QueryBuilder in where callback", () => { - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); - db.from("users") + db.from(usersTO) .delete() .where((q) => { - // Type check: q should have filter, orderBy, top, skip methods - expectTypeOf(q).toHaveProperty("filter"); + // Type check: q should have where, orderBy, top, skip methods + expectTypeOf(q).toHaveProperty("where"); expectTypeOf(q).toHaveProperty("orderBy"); expectTypeOf(q).toHaveProperty("top"); expectTypeOf(q).toHaveProperty("skip"); - return q.filter({ active: false }); + return q.where(eq(usersTO.active, 0)); }); }); }); @@ -266,12 +227,10 @@ describe("delete method", () => { it("should return error on failed delete", async () => { const mockFetch = vi.fn().mockRejectedValue(new Error("Network error")); - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = await db - .from("users") + .from(usersTO) .delete() .byId("user-123") .execute({ fetchHandler: mockFetch as any }); diff --git a/packages/fmodata/tests/e2e.test.ts b/packages/fmodata/tests/e2e.test.ts index befcdcb4..5431c46e 100644 --- a/packages/fmodata/tests/e2e.test.ts +++ b/packages/fmodata/tests/e2e.test.ts @@ -6,7 +6,15 @@ */ import { describe, it, afterEach, expect, assert, expectTypeOf } from "vitest"; -import { FMServerConnection, Metadata } from "../src/index"; +import { + FMServerConnection, + fmTableOccurrence, + 
Metadata, + textField, + contains, + eq, + isNotNull, +} from "@proofkit/fmodata"; import { jsonCodec } from "./utils/helpers"; import { z } from "zod/v4"; import { mockResponses } from "./fixtures/responses"; @@ -17,13 +25,9 @@ import { password, apiKey, database, - contactsTO, - usersTO, + contacts, + users, contactsTOWithIds, - usersTOWithIds, - contactsTOForBatch, - usersTOForBatch, - occurrencesWithIds, } from "./e2e/setup"; if (!serverUrl) { @@ -45,11 +49,9 @@ afterEach(async () => { serverUrl: serverUrl!, auth: { apiKey }, }); - const db = connection.database(database!, { - occurrences: [contactsTO, usersTO] as const, - }); + const db = connection.database(database!); - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // Delete records by ID for (const recordId of createdRecordIds) { @@ -67,7 +69,7 @@ afterEach(async () => { try { await entitySet .delete() - .where((q) => q.filter({ name: { contains: marker } })) + .where((q) => q.where(contains(contacts.name, marker))) .execute(); } catch (error) { // Ignore errors - records may have already been deleted @@ -87,12 +89,10 @@ describe("Basic E2E Operations", () => { serverUrl: serverUrl!, auth: { apiKey }, }); - const db = connection.database(database!, { - occurrences: [contactsTO, usersTO] as const, - }); + const db = connection.database(database!); it("should connect to the server and list records", async () => { - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // Test basic list query (limit to 10 records to avoid timeout) const result = await entitySet.list().top(10).execute(); @@ -100,6 +100,7 @@ describe("Basic E2E Operations", () => { console.log(result.error); throw new Error("Expected data to be defined"); } + assert(result.data, "Expected data to be defined"); // Verify we got a response @@ -139,7 +140,7 @@ describe("Basic E2E Operations", () => { }); it("should insert a record and verify count increased", async () => { - const entitySet = 
db.from("contacts"); + const entitySet = db.from(contacts); // Get initial count const initialCountResult = await entitySet.list().count().execute(); @@ -177,7 +178,7 @@ describe("Basic E2E Operations", () => { }); it("should update a record by ID and return count", async () => { - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // First, insert a record to update const uniqueName = `Update Test ${Date.now()}`; @@ -207,7 +208,7 @@ describe("Basic E2E Operations", () => { }); it("should update multiple records by filter and return count", async () => { - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // Insert multiple records with a unique marker const marker = `Bulk Update ${Date.now()}`; @@ -221,7 +222,7 @@ describe("Basic E2E Operations", () => { // Update all records with the marker const updateResult = await entitySet .update({ hobby: "Updated Hobby" }) - .where((q) => q.filter({ name: { contains: marker } })) + .where((q) => q.where(contains(contacts.name, marker))) .execute(); assert(updateResult.data, "Expected update data to be defined"); @@ -230,7 +231,7 @@ describe("Basic E2E Operations", () => { }); it("should delete a record by ID and return count", async () => { - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // First, insert a record to delete const uniqueName = `Delete Test ${Date.now()}`; @@ -263,7 +264,7 @@ describe("Basic E2E Operations", () => { }); it("should delete multiple records by filter and return count", async () => { - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // Insert multiple records with a unique marker const marker = `Bulk Delete ${Date.now()}`; @@ -278,7 +279,7 @@ describe("Basic E2E Operations", () => { // Delete all records with the marker const deleteResult = await entitySet .delete() - .where((q) => q.filter({ name: { contains: marker } })) + .where((q) => q.where(contains(contacts.name, marker))) 
.execute(); assert(deleteResult.data, "Expected delete data to be defined"); @@ -294,12 +295,12 @@ describe("Basic E2E Operations", () => { }); it("should properly type and validate expanded properties", async () => { - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // Test expand with type safety const result = await entitySet .list() - .expand("users", (b) => b.select("name")) + .expand(users, (b: any) => b.select({ name: users.name })) .execute(); // Verify we got a response @@ -315,13 +316,15 @@ describe("Basic E2E Operations", () => { expect(firstRecord.users.length).toBeGreaterThan(0); }); - it("should validate all fields in the expand are valid", async () => { + it("the server should validate all fields in the expand are valid", async () => { + const notRealUsers = fmTableOccurrence("users", { + not_real_field: textField(), + }); const result = await db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => { - // @ts-expect-error - this field is not real - return b.select("not_real_field"); + .expand(users, (b: any) => { + return b.select({ notReal: notRealUsers.not_real_field }); }) .execute({ fetchHandler: createMockFetch( @@ -392,22 +395,22 @@ describe("Entity IDs", () => { auth: { username, password }, }); - const db = connection.database(database!, { - occurrences: occurrencesWithIds, - }); + const db = connection.database(database!, { useEntityIds: true }); const dbWithoutIds = connection.database(database!, { - occurrences: occurrencesWithIds, useEntityIds: false, }); it("should not use entity IDs in the queryString if useEntityIds is false", async () => { const query = dbWithoutIds - .from("contacts") + .from(contactsTOWithIds) .list() - .select("name_renamed", "hobby") - .expand("users") - .filter({ hobby: "Testing" }) + .select({ + name_renamed: contactsTOWithIds.name_renamed, + hobby: contactsTOWithIds.hobby, + }) + .expand(users) + .where(eq(contactsTOWithIds.hobby, "Testing")) .top(1); const 
queryString = query.getQueryString(); console.log(queryString); @@ -417,9 +420,12 @@ describe("Entity IDs", () => { it("should replace field names in select statements with entity IDs", async () => { const query = db - .from("contacts") + .from(contactsTOWithIds) .list() - .select("name_renamed", "hobby") + .select({ + name_renamed: contactsTOWithIds.name_renamed, + hobby: contactsTOWithIds.hobby, + }) .top(1); const queryString = query.getQueryString(); @@ -433,12 +439,12 @@ describe("Entity IDs", () => { let rawResponseData: any; let capturedPreferHeader: string | null = null; - db.from("contacts") + db.from(contactsTOWithIds) .list() .top(1) .execute({ hooks: { - before: async (req) => { + before: async (req: any) => { const headers = req.headers; capturedPreferHeader = headers.get("Prefer"); return; @@ -449,12 +455,12 @@ describe("Entity IDs", () => { expect(capturedPreferHeader).toBe("fmodata.entity-ids"); const result = await db - .from("contacts") + .from(contactsTOWithIds) .list() .top(1) .execute({ hooks: { - after: async (req, res) => { + after: async (req: any, res: any) => { // Clone the response so we can read it without consuming the original const clonedRes = res.clone(); rawResponseData = await clonedRes.json(); @@ -504,7 +510,11 @@ describe("Entity IDs", () => { it("should not transform if the feature is disabled (even if ids are provided)", async () => { let rawResponseData: any; - const query = dbWithoutIds.from("contacts").list().select("hobby").top(1); + const query = dbWithoutIds + .from(contacts) + .list() + .select({ hobby: contacts.hobby }) + .top(1); // should not use ids when useEntityIds is false expect(query.getQueryString()).toContain("contacts"); @@ -513,7 +523,7 @@ describe("Entity IDs", () => { const result = await query.execute({ hooks: { - after: async (req, res) => { + after: async (req: any, res: any) => { // Clone the response so we can read it without consuming the original const clonedRes = res.clone(); rawResponseData = 
await clonedRes.json(); @@ -555,20 +565,23 @@ describe("Entity IDs", () => { it("should properly type and validate expanded properties with entity IDs", async () => { // get the first record const result = await db - .from("contacts") + .from(contactsTOWithIds) .list() .top(1) - .select("PrimaryKey") + .select({ PrimaryKey: contactsTOWithIds.PrimaryKey }) .execute(); const firstRecord = result.data?.[0]; assert(firstRecord, "Should have a first record"); + if (!firstRecord.PrimaryKey) { + throw new Error("Expected PrimaryKey to be defined"); + } // now expand the users property const expandedResult = await db - .from("contacts") + .from(contactsTOWithIds) .get(firstRecord.PrimaryKey) - .expand("users"); + .expand(users); // should use the table id in the query string expect(expandedResult.getQueryString()).not.toContain("/contacts("); @@ -586,14 +599,12 @@ describe("Batch Operations", () => { auth: { username, password }, }); - const db = connection.database(database!, { - occurrences: [contactsTOForBatch, usersTOForBatch], - }); + const db = connection.database(database!); const batchCreatedRecordIds: string[] = []; afterEach(async () => { - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); // Delete records by ID for (const recordId of batchCreatedRecordIds) { @@ -609,8 +620,8 @@ describe("Batch Operations", () => { it("should execute simple batch with two GET queries", async () => { // Create two different query builders - const query1 = db.from("contacts").list().top(2); - const query2 = db.from("users").list().top(2); + const query1 = db.from(contacts).list().top(2); + const query2 = db.from(users).list().top(2); // Execute batch const result = await db.batch([query1, query2]).execute(); @@ -637,12 +648,12 @@ describe("Batch Operations", () => { expect(firstContact).toBeDefined(); expect(firstContact).not.toHaveProperty("@odata.id"); expect(firstContact).not.toHaveProperty("@odata.editLink"); - 
expect(firstContact.hobby).toBe("static-value"); + expect(firstContact.hobby).toBe("Board games"); }); it("should allow adding to a batch after it has been created", async () => { const batch = db.batch([]); - batch.addRequest(db.from("contacts").list().top(2)); + batch.addRequest(db.from(contacts).list().top(2)); const result = await batch.execute(); expect(result.results).toBeDefined(); @@ -657,8 +668,8 @@ describe("Batch Operations", () => { it("should execute batch with mixed operations (GET + POST)", async () => { // Create a GET query and a POST insert - const listQuery = db.from("contacts").list().top(2); - const insertQuery = db.from("contacts").insert({ + const listQuery = db.from(contacts).list().top(2); + const insertQuery = db.from(contacts).insert({ name: "Batch Test User", hobby: "Testing", }); @@ -692,17 +703,17 @@ describe("Batch Operations", () => { it("should execute batch with multiple POST operations in a changeset", async () => { // Create multiple insert operations - const insert1 = db.from("contacts").insert({ + const insert1 = db.from(contacts).insert({ name: "Batch User 1", hobby: "Reading", }); - const insert2 = db.from("contacts").insert({ + const insert2 = db.from(contacts).insert({ name: "Batch User 2", hobby: "Writing", }); - const insert3 = db.from("contacts").insert({ + const insert3 = db.from(contacts).insert({ name: "Batch User 3", hobby: "Gaming", }); @@ -731,7 +742,7 @@ describe("Batch Operations", () => { it("should execute complex batch with multiple operation types", async () => { // First, create a record we can update/delete const setupInsert = await db - .from("contacts") + .from(contacts) .insert({ name: "Test Record for Batch", hobby: "Testing", @@ -746,18 +757,18 @@ describe("Batch Operations", () => { batchCreatedRecordIds.push(testRecordId); // Create a complex batch with multiple operation types - const listQuery = db.from("contacts").list().top(1); - const insertOp = db.from("contacts").insert({ + const listQuery = 
db.from(contacts).list().top(1); + const insertOp = db.from(contacts).insert({ name: "Complex Batch Insert", hobby: "Batch Testing", }); const updateOp = db - .from("contacts") + .from(contacts) .update({ name: "Updated via Batch", }) .byId(testRecordId); - const deleteOp = db.from("contacts").delete().byId(testRecordId); + const deleteOp = db.from(contacts).delete().byId(testRecordId); // Execute the complex batch const result = await db @@ -804,9 +815,9 @@ describe("Batch Operations", () => { it("should correctly infer tuple types for batch results", async () => { // Create a batch with different operation types - const query1 = db.from("contacts").list().top(1); - const query2 = db.from("users").list().top(1); - const insert = db.from("contacts").insert({ + const query1 = db.from(contacts).list().top(1); + const query2 = db.from(users).list().top(1); + const insert = db.from(contacts).insert({ name: "Type Test User", hobby: "Testing Types", }); @@ -827,25 +838,25 @@ describe("Batch Operations", () => { expectTypeOf(result.results).not.toBeAny(); - const contacts = r1.data; - const users = r2.data; + const contactsData = r1.data; + const usersData = r2.data; const insertedContact = r3.data; - expectTypeOf(contacts).not.toBeAny(); - expectTypeOf(users).not.toBeAny(); + expectTypeOf(contactsData).not.toBeAny(); + expectTypeOf(usersData).not.toBeAny(); expectTypeOf(insertedContact).not.toBeAny(); // Verify types are correctly inferred - expect(Array.isArray(contacts)).toBe(true); - expect(Array.isArray(users)).toBe(true); + expect(Array.isArray(contactsData)).toBe(true); + expect(Array.isArray(usersData)).toBe(true); expect(typeof insertedContact).toBe("object"); - const firstContact = contacts[0]!; + const firstContact = contactsData[0]!; expect(firstContact).toBeDefined(); - const hobby: string = firstContact.hobby; + const hobby: string | null = firstContact.hobby; expect(typeof hobby).toBe("string"); - const firstUser = users[0]!; + const firstUser = 
usersData[0]!; expect(firstUser).toBeDefined(); expectTypeOf(firstUser.name).not.toBeAny(); @@ -858,12 +869,15 @@ describe("Batch Operations", () => { it("should execute batch with 3 GET operations each with a filter", async () => { // Create three GET queries with different filters - const query1 = db.from("contacts").list().filter({ hobby: "static-value" }); - const query2 = db.from("contacts").list().filter({ id_user: "never" }); - const query3 = db - .from("users") + const query1 = db + .from(contacts) + .list() + .where(eq(contacts.hobby, "static-value")); + const query2 = db + .from(contacts) .list() - .filter({ name: { ne: null } }); + .where(eq(contacts.id_user, "never")); + const query3 = db.from(users).list().where(isNotNull(users.name)); let flag = 1; // Execute batch diff --git a/packages/fmodata/tests/e2e/setup.ts b/packages/fmodata/tests/e2e/setup.ts index 9ccbe807..51e17c62 100644 --- a/packages/fmodata/tests/e2e/setup.ts +++ b/packages/fmodata/tests/e2e/setup.ts @@ -8,11 +8,10 @@ import path from "path"; import { config } from "dotenv"; import { - FMServerConnection, - defineBaseTable, - defineTableOccurrence, - buildOccurrences, -} from "../../src/index"; + fmTableOccurrence, + textField, + timestampField, +} from "@proofkit/fmodata"; import { z } from "zod/v4"; config({ path: path.resolve(__dirname, "../../.env.local") }); @@ -24,177 +23,103 @@ export const username = process.env.FMODATA_USERNAME; export const password = process.env.FMODATA_PASSWORD; export const database = process.env.FMODATA_DATABASE; -// Schema for contacts table (used in basic E2E tests) -export const contactsBase = defineBaseTable({ - schema: { - PrimaryKey: z.string(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - hobby: z.string().nullable(), - id_user: z.string().nullable(), +// Define TOs with navigationPaths +export const 
contacts = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), }, - idField: "PrimaryKey", -}); - -// Schema for users table (used in basic E2E tests) -export const usersBase = defineBaseTable({ - schema: { - id: z.string(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - id_customer: z.string().nullable(), + { + navigationPaths: ["users"], }, - idField: "id", -}); - -// Phase 1: Define base TOs (without navigation) -const _contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, -}); - -const _usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, -}); - -// Phase 2: Build final TOs with navigation -export const [contactsTO, usersTO] = buildOccurrences({ - occurrences: [_contactsTO, _usersTO], - navigation: { - contacts: ["users"], - users: ["contacts"], +); + +export const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + id_customer: textField(), }, -}); - -// Schema for contacts table with IDs (used in entity-ids tests) -export const contactsBaseWithIds = defineBaseTable({ - schema: { - PrimaryKey: z.string(), - CreationTimestamp: z.string(), - CreatedBy: z.string(), - ModificationTimestamp: z.string(), - ModifiedBy: z.string(), - name_renamed: z.string().nullable(), - hobby: z.string().nullable(), - id_user: z.string().nullable(), + { + navigationPaths: ["contacts"], }, - idField: "PrimaryKey", - readOnly: [ - "CreationTimestamp", - "CreatedBy", - 
"ModificationTimestamp", - "ModifiedBy", - ] as const, - fmfIds: { - PrimaryKey: "FMFID:4296032390", - CreationTimestamp: "FMFID:8590999686", - CreatedBy: "FMFID:12885966982", - ModificationTimestamp: "FMFID:17180934278", - ModifiedBy: "FMFID:21475901574", - name_renamed: "FMFID:25770868870", // in FM: "name" - hobby: "FMFID:30065836166", - id_user: "FMFID:38655770758", +); + +// Define TOs with entity IDs and navigationPaths +export const contactsTOWithIds = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey().entityId("FMFID:4296032390"), + CreationTimestamp: timestampField().readOnly().entityId("FMFID:8590999686"), + CreatedBy: textField().readOnly().entityId("FMFID:12885966982"), + ModificationTimestamp: timestampField() + .readOnly() + .entityId("FMFID:17180934278"), + ModifiedBy: textField().readOnly().entityId("FMFID:21475901574"), + name_renamed: textField().entityId("FMFID:25770868870"), // in FM: "name" + hobby: textField().entityId("FMFID:30065836166"), + id_user: textField().entityId("FMFID:38655770758"), }, -}); - -// Schema for users table with IDs (used in entity-ids tests) -export const usersBaseWithIds = defineBaseTable({ - schema: { - id: z.string(), - CreationTimestamp: z.string(), - CreatedBy: z.string(), - ModificationTimestamp: z.string(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - id_customer: z.string().nullable(), + { + entityId: "FMTID:1065094", + navigationPaths: ["users"], }, - idField: "id", - readOnly: [ - "CreationTimestamp", - "CreatedBy", - "ModifiedBy", - "ModificationTimestamp", - ] as const, - fmfIds: { - id: "FMFID:4296032389", - CreationTimestamp: "FMFID:8590999685", - CreatedBy: "FMFID:12885966981", - ModificationTimestamp: "FMFID:17180934277", - ModifiedBy: "FMFID:21475901573", - name: "FMFID:25770868869", - id_customer: "FMFID:30065836165", +); + +export const usersTOWithIds = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:4296032389"), + 
CreationTimestamp: timestampField().readOnly().entityId("FMFID:8590999685"), + CreatedBy: textField().readOnly().entityId("FMFID:12885966981"), + ModificationTimestamp: timestampField() + .readOnly() + .entityId("FMFID:17180934277"), + ModifiedBy: textField().readOnly().entityId("FMFID:21475901573"), + name: textField().entityId("FMFID:25770868869"), + id_customer: textField().entityId("FMFID:30065836165"), }, -}); - -// Phase 1: Define base TOs with entity IDs (without navigation) -const _contactsTOWithIds = defineTableOccurrence({ - fmtId: "FMTID:1065094", - name: "contacts", - baseTable: contactsBaseWithIds, -}); - -const _usersTOWithIds = defineTableOccurrence({ - fmtId: "FMTID:1065093", - name: "users", - baseTable: usersBaseWithIds, -}); - -// Phase 2: Build final TOs with navigation -export const occurrencesWithIds = buildOccurrences({ - occurrences: [_contactsTOWithIds, _usersTOWithIds], - navigation: { - contacts: ["users"], - users: ["contacts"], + { + entityId: "FMTID:1065093", + navigationPaths: ["contacts"], }, -}); +); -// Export individual TOs for tests that need them -export const [contactsTOWithIds, usersTOWithIds] = occurrencesWithIds; +// Export occurrences array for backward compatibility +export const occurrencesWithIds = [contactsTOWithIds, usersTOWithIds] as const; // Schema for batch operations tests -export const contactsBaseForBatch = defineBaseTable({ - schema: { - PrimaryKey: z.string(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - hobby: z - .string() - .nullable() - .transform((val) => "static-value"), - id_user: z.string().nullable(), - }, - idField: "PrimaryKey", -}); - -export const usersBaseForBatch = defineBaseTable({ - schema: { - id: z.string(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - 
ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - id_customer: z.string().nullable(), - }, - idField: "id", -}); - -export const contactsTOForBatch = defineTableOccurrence({ - name: "contacts" as const, - baseTable: contactsBaseForBatch, +export const contactsTOForBatch = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField().readValidator( + z.string().transform((val) => "static-value"), + ), + id_user: textField(), }); -export const usersTOForBatch = defineTableOccurrence({ - name: "users" as const, - baseTable: usersBaseForBatch, +export const usersTOForBatch = fmTableOccurrence("users", { + id: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + id_customer: textField(), }); diff --git a/packages/fmodata/tests/errors.test.ts b/packages/fmodata/tests/errors.test.ts index 8060a9b6..10e01f40 100644 --- a/packages/fmodata/tests/errors.test.ts +++ b/packages/fmodata/tests/errors.test.ts @@ -13,8 +13,9 @@ import { describe, it, expect, assert } from "vitest"; import { z, ZodError } from "zod/v4"; import { - defineBaseTable, - defineTableOccurrence, + fmTableOccurrence, + textField, + numberField, HTTPError, ODataError, SchemaLockedError, @@ -27,7 +28,7 @@ import { isSchemaLockedError, isResponseStructureError, isRecordCountMismatchError, -} from "../src/index"; +} from "@proofkit/fmodata"; import { createMockClient } from "./utils/test-setup"; import { simpleMock, createMockFetch } from "./utils/mock-fetch"; import { validateHeaderValue } from "http"; @@ -35,27 +36,19 @@ import { validateHeaderValue } from "http"; describe("Error Handling", () => { const client = createMockClient(); - const usersBase = defineBaseTable({ - schema: 
{ - id: z.string(), - username: z.string(), - email: z.string().email(), - active: z.boolean(), - age: z.number().int().min(0).max(150), - }, - idField: "id", - }); - - const usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, + const users = fmTableOccurrence("users", { + id: textField().primaryKey(), + username: textField(), + email: textField().readValidator(z.string().email()), + active: numberField().readValidator(z.coerce.boolean()), + age: numberField().readValidator(z.number().int().min(0).max(150)), }); describe("HTTP Errors", () => { it("should return HTTPError for 404 Not Found", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 404 }), @@ -73,9 +66,9 @@ describe("Error Handling", () => { }); it("should return HTTPError for 401 Unauthorized", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 401 }), @@ -90,9 +83,9 @@ describe("Error Handling", () => { }); it("should return HTTPError for 500 Server Error", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 500 }), @@ -109,9 +102,9 @@ describe("Error Handling", () => { it("should include response body in HTTPError", async () => { const errorBody = { message: "Custom error message" }; - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ @@ -136,9 +129,9 @@ describe("Error Handling", () => { }, }; 
- const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -166,9 +159,9 @@ describe("Error Handling", () => { }, }; - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -198,9 +191,9 @@ describe("Error Handling", () => { }, }; - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -219,7 +212,7 @@ describe("Error Handling", () => { describe("Validation Errors", () => { it("should return ValidationError when schema validation fails", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); // Return data that doesn't match schema (email is invalid, age is out of range) const invalidData = [ @@ -233,7 +226,7 @@ describe("Error Handling", () => { ]; const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch(invalidData), @@ -250,7 +243,7 @@ describe("Error Handling", () => { }); it("should preserve Standard Schema issues in cause property", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const invalidData = [ { @@ -263,7 +256,7 @@ describe("Error Handling", () => { ]; const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch(invalidData), @@ -290,7 +283,7 @@ describe("Error Handling", () => { }); it("should include field name in ValidationError", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = 
client.database("testdb"); const invalidData = [ { @@ -303,7 +296,7 @@ describe("Error Handling", () => { ]; const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch(invalidData), @@ -319,11 +312,11 @@ describe("Error Handling", () => { describe("Response Structure Errors", () => { it("should return ResponseStructureError for invalid response structure", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); // Return invalid structure (not an object) const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -343,10 +336,10 @@ describe("Error Handling", () => { }); it("should return ResponseStructureError when value is not an array", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -365,7 +358,7 @@ describe("Error Handling", () => { describe("Record Count Mismatch Errors", () => { it("should return RecordCountMismatchError for single() when multiple records found", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const multipleRecords = [ { @@ -385,7 +378,7 @@ describe("Error Handling", () => { ]; const result = await db - .from("users") + .from(users) .list() .single() .execute({ @@ -401,10 +394,10 @@ describe("Error Handling", () => { }); it("should return RecordCountMismatchError for single() when no records found", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .single() .execute({ @@ -422,9 +415,9 @@ describe("Error Handling", () => { describe("Type Guards", () => { it("should correctly identify HTTPError 
using type guard", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 404 }), @@ -440,9 +433,9 @@ describe("Error Handling", () => { }); it("should correctly identify ValidationError using type guard", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch([ @@ -466,9 +459,9 @@ describe("Error Handling", () => { }); it("should correctly identify ODataError using type guard", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -490,9 +483,9 @@ describe("Error Handling", () => { }); it("should correctly identify SchemaLockedError using type guard", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -517,9 +510,9 @@ describe("Error Handling", () => { }); it("should correctly identify ResponseStructureError using type guard", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: createMockFetch({ @@ -536,9 +529,9 @@ describe("Error Handling", () => { }); it("should correctly identify RecordCountMismatchError using type guard", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() 
.single() .execute({ @@ -567,9 +560,9 @@ describe("Error Handling", () => { describe("Error Properties", () => { it("should include timestamp in all errors", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 404 }), @@ -582,9 +575,9 @@ describe("Error Handling", () => { }); it("should include kind property for discriminated unions", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 404 }), @@ -599,9 +592,9 @@ describe("Error Handling", () => { describe("Error Handling Patterns", () => { it("should allow instanceof checks (like ffetch pattern)", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 404 }), @@ -617,9 +610,9 @@ describe("Error Handling", () => { }); it("should allow switch statement on kind property", async () => { - const db = client.database("testdb", { occurrences: [usersTO] }); + const db = client.database("testdb"); const result = await db - .from("users") + .from(users) .list() .execute({ fetchHandler: simpleMock({ status: 404 }), diff --git a/packages/fmodata/tests/expands.test.ts b/packages/fmodata/tests/expands.test.ts index 0a913f54..d5de7a22 100644 --- a/packages/fmodata/tests/expands.test.ts +++ b/packages/fmodata/tests/expands.test.ts @@ -7,123 +7,152 @@ * DO NOT RUN THESE TESTS YET - they define the API we want to build. 
*/ -import { describe, it, expect, expectTypeOf } from "vitest"; +import { describe, it, expect, expectTypeOf, assert } from "vitest"; import { z } from "zod/v4"; -import { defineBaseTable, defineTableOccurrence, buildOccurrences } from "../src/index"; -import { InferSchemaType } from "../src/types"; -import { createMockFetch } from "./utils/mock-fetch"; +import { + fmTableOccurrence, + textField, + numberField, + eq, +} from "@proofkit/fmodata"; +import { createMockClient, users, contacts } from "./utils/test-setup"; +import { first } from "es-toolkit/compat"; +import { simpleMock } from "./utils/mock-fetch"; import { mockResponses } from "./fixtures/responses"; -import { createMockClient } from "./utils/test-setup"; describe("Expand API Specification", () => { - const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - hobby: z.string().optional(), - id_user: z.string(), + // Spec test table definitions (simplified for type testing) + const userCustomer = fmTableOccurrence( + "user_customer", + { + id: textField().primaryKey(), + name: textField().notNull(), + address: textField(), + tier: textField().notNull(), }, - idField: "id", - }); - - const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), - active: z.boolean(), - id_customer: z.string(), + { + defaultSelect: "all", }, - idField: "id", - }); - - const customerBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - address: z.string().optional(), - tier: z.string(), + ); + + const contacts = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey(), + name: textField().notNull(), + hobby: textField(), + id_user: textField().notNull(), }, - idField: "id", - }); - - // Phase 1: Define base TOs (without navigation) - const _customerTO = defineTableOccurrence({ - name: "user_customer", - baseTable: customerBase, - defaultSelect: "all", - }); - - const _contactsTO = defineTableOccurrence({ - 
name: "contacts", - baseTable: contactsBase, - defaultSelect: "all", - }); - - const _usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - defaultSelect: "all", - }); - - // Phase 2: Build final TOs with navigation - const [customerTO, contactsTO, usersTO] = buildOccurrences({ - occurrences: [_customerTO, _contactsTO, _usersTO], - navigation: { - contacts: ["users"], - users: ["user_customer", "contacts"], + { + defaultSelect: "all", + navigationPaths: ["users", "other_users"], }, - }); + ); + + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: numberField().readValidator(z.coerce.boolean()).notNull(), + id_customer: textField(), + }, + { + defaultSelect: "all", + navigationPaths: ["user_customer", "contacts"], + }, + ); + + const otherUsers = fmTableOccurrence( + "other_users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + active: numberField().readValidator(z.coerce.boolean()).notNull(), + id_customer: textField().notNull(), + }, + { + defaultSelect: "all", + }, + ); + + // Real server schema table definitions (for validation tests that use captured responses) + const contactsReal = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: textField(), + CreatedBy: textField(), + ModificationTimestamp: textField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + my_calc: textField(), + }, + { + defaultSelect: "all", + navigationPaths: ["users"], + }, + ); + + const usersReal = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + CreationTimestamp: textField(), + CreatedBy: textField(), + ModificationTimestamp: textField(), + ModifiedBy: textField(), + name: textField(), + id_customer: textField(), + }, + { + defaultSelect: "all", + navigationPaths: ["user_customer", 
"contacts"], + }, + ); const client = createMockClient(); - type UserFieldNames = keyof InferSchemaType; - type CustomerFieldNames = keyof InferSchemaType; + // type UserFieldNames = keyof InferTableSchema; + // type CustomerFieldNames = keyof InferTableSchema; - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO, customerTO], - }); + const db = client.database("test_db"); describe("Simple expand (no callback)", () => { it("should generate query string for simple expand", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users") + .expand(users) .getQueryString(); expect(queryString).toBe("/contacts?$top=1000&$expand=users"); }); - it("should allow arbitrary string relations", () => { - const queryString = db - .from("contacts") + it("should not allow arbitrary string relations", () => { + db.from(contacts) .list() + // @ts-expect-error - arbitrary string relation .expand("arbitrary_relation") .getQueryString(); - expect(queryString).toBe( - "/contacts?$top=1000&$expand=arbitrary_relation", - ); - }); - - it("should provide autocomplete for known relations", () => { - const entitySet = db.from("contacts"); - - // This should show autocomplete for "users" | (string & {}) - expectTypeOf(entitySet.list().expand) - .parameter(0) - .not.toEqualTypeOf(); }); }); describe("Expand with callback - select", () => { it("should type callback builder to target table schema", () => { - db.from("contacts") + db.from(contacts) .list() - .expand("users", (builder) => { + .expand(users, (builder) => { // builder.select should only accept fields from users table expectTypeOf(builder.select).parameter(0).not.toEqualTypeOf(); - return builder.select("username", "email"); + return builder.select({ + username: users.username, + email: users.email, + }); }); }); @@ -131,14 +160,10 @@ describe("Expand API Specification", () => { async () => { // checking types only, don't actually make a request const result = await db - 
.from("contacts") + .from(contacts) .list() - .expand("users", (b) => - b.select( - "username", - "email", - // "id_customer" - ), + .expand(users, (b) => + b.select({ username: users.username, email: users.email }), ) .execute(); @@ -166,9 +191,11 @@ describe("Expand API Specification", () => { it("should generate query string with $select", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.select("username", "email")) + .expand(users, (b) => + b.select({ username: users.username, email: users.email }), + ) .getQueryString(); expect(queryString).toBe( @@ -177,11 +204,11 @@ describe("Expand API Specification", () => { }); it("should enforce callback returns builder", () => { - db.from("contacts") + db.from(contacts) .list() - .expand("users", (b) => { + .expand(users, (b) => { // Must return the builder - return b.select("username"); + return b.select({ username: users.username }); }); }); }); @@ -189,9 +216,9 @@ describe("Expand API Specification", () => { describe("Expand with callback - filter", () => { it("should generate query string with $filter", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.filter({ active: true })) + .expand(users, (b) => b.where(eq(users.active, 1))) .getQueryString(); expect(queryString).toContain("$expand=users($filter=active"); @@ -201,9 +228,9 @@ describe("Expand API Specification", () => { describe("Expand with callback - orderBy", () => { it("should generate query string with $orderby", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.orderBy("username")) + .expand(users, (b) => b.orderBy("username")) .getQueryString(); expect(queryString).toContain("$expand=users($orderby=username"); @@ -213,9 +240,9 @@ describe("Expand API Specification", () => { describe("Expand with callback - top and skip", () => { it("should generate query string with $top", () => { const 
queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.top(5)) + .expand(users, (b) => b.top(5)) .getQueryString(); expect(queryString).toContain("$expand=users($top=5"); @@ -223,9 +250,9 @@ describe("Expand API Specification", () => { it("should generate query string with $skip", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.skip(10)) + .expand(users, (b) => b.skip(10)) .getQueryString(); expect(queryString).toContain("$expand=users($skip=10"); @@ -235,10 +262,10 @@ describe("Expand API Specification", () => { describe("Multiple expands (chaining)", () => { it("should allow chaining multiple expand calls", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.select("username")) - .expand("other_users") + .expand(users, (b) => b.select({ username: users.username })) + .expand(otherUsers) .getQueryString(); expect(queryString).toBe( @@ -247,44 +274,214 @@ describe("Expand API Specification", () => { }); it("should type each expand callback independently", () => { - db.from("contacts") + db.from(contacts) .list() - .expand("users", (builder) => { + .expand(users, (builder) => { // First callback typed to users expectTypeOf(builder.select).parameter(0).not.toEqualTypeOf(); - return builder.select("username"); + return builder.select({ username: users.username }); }) - .expand("other_users", (builder) => { + .expand(otherUsers, (builder) => { // Second callback - arbitrary relation so accepts any - return builder.select("email"); + return builder.select({ email: otherUsers.email }); }); }); }); describe("Nested expands", () => { it("should type nested expand callback to nested target schema", () => { - db.from("contacts") + const query = db + .from(contacts) .list() - .expand("users", (usersBuilder) => { + .expand(users, (usersBuilder) => { return usersBuilder - .select("username", "email") - .expand("user_customer", 
(customerBuilder) => { + .select({ username: users.username, email: users.email }) + .expand(userCustomer, (customerBuilder) => { // customerBuilder should be typed to customer schema // Verify it accepts valid fields - return customerBuilder.select("name", "tier"); + return customerBuilder.select({ + name: userCustomer.name, + tier: userCustomer.tier, + }); }); }); + + // type tests, don't run this code + async () => { + const result = await query.execute(); + + const firstRecord = result.data![0]!; + + const firstUser = firstRecord.users[0]!; + + // @ts-expect-error - this field was not selected, so it shouldn't be in the type + firstUser.id_customer; + expectTypeOf(firstUser).not.toHaveProperty("id_customer"); + expectTypeOf(firstUser).toHaveProperty("username"); + }; + }); + + it("should validate nested expands on single record", async () => { + // This test uses real server schema (contactsReal, usersReal) to match captured responses + const mockData = mockResponses["deep nested expand"]; + const result = await db + .from(contactsReal) + .get("B5BFBC89-03E0-47FC-ABB6-D51401730227") + .expand(usersReal, (usersBuilder) => { + return usersBuilder + .select({ name: usersReal.name, id: usersReal.id }) + .expand(userCustomer, (customerBuilder) => { + return customerBuilder.select({ name: userCustomer.name }); + }); + }) + .execute({ + fetchHandler: simpleMock({ + status: mockData.status, + body: mockData.response, + headers: mockData.headers, + }), + }); + + assert(result.data, "Result data should be defined"); + expect(result.data.name).toBe("Eric"); + expect(result.data.hobby).toBe("Board games"); + expect(result.data.users).toBeDefined(); + + // Type check: verify that only selected fields are typed correctly + const firstUser = result.data.users?.[0]; + assert(firstUser, "First user should be defined"); + expectTypeOf(firstUser).toHaveProperty("name"); + expectTypeOf(firstUser).toHaveProperty("id"); + expectTypeOf(firstUser).toHaveProperty("user_customer"); + // 
@ts-expect-error - id_customer was not selected, should not be in type + expectTypeOf(firstUser.id_customer).toBeNever(); + + // Verify nested expand structure exists + expect(firstUser.id).toBe("1A269FA3-82E6-465A-94FA-39EE3F2F9B5D"); + expect(firstUser.name).toBe("Test User"); + expect(firstUser.user_customer).toBeDefined(); + expect(Array.isArray(firstUser.user_customer)).toBe(true); + expect(firstUser.user_customer.length).toBe(1); + + // Verify nested customer data + const firstCustomer = firstUser.user_customer?.[0]; + assert(firstCustomer, "First customer should be defined"); + + expectTypeOf(firstCustomer).toHaveProperty("name"); + // @ts-expect-error - other fields were not selected + expectTypeOf(firstCustomer.address).toBeNever(); + // @ts-expect-error - tier was not selected + expectTypeOf(firstCustomer.tier).toBeNever(); + + expect(firstCustomer.name).toBe("test"); + }); + + it("should validate nested expands on list query", async () => { + // This test uses real server schema (contactsReal, usersReal) to match captured responses + const mockData = mockResponses["list with nested expand"]; + const result = await db + .from(contactsReal) + .list() + .expand(usersReal, (usersBuilder) => { + // No select on users - all fields should be returned + return usersBuilder.expand(userCustomer, (customerBuilder) => { + return customerBuilder.select({ name: userCustomer.name }); + }); + }) + .execute({ + fetchHandler: simpleMock({ + status: mockData.status, + body: mockData.response, + headers: mockData.headers, + }), + }); + + expect(result.data).toBeDefined(); + expect(Array.isArray(result.data)).toBe(true); + expect(result.data?.length).toBe(2); + + // Type check: verify list results are properly typed + const firstContact = result.data?.[0]; + if (firstContact) { + // Contact should have all its fields (no select was called on contacts) + expectTypeOf(firstContact).toHaveProperty("name"); + expectTypeOf(firstContact).toHaveProperty("PrimaryKey"); + 
expectTypeOf(firstContact).toHaveProperty("hobby"); + + // Verify users expand exists and is typed correctly + expectTypeOf(firstContact).toHaveProperty("users"); + expectTypeOf(firstContact.users).toBeArray(); + + // Verify runtime data (note: response has 'name' not 'id' due to real server schema) + expect(firstContact.name).toBe("Eric"); + expect(firstContact.hobby).toBe("Board games"); + expect(firstContact.users).toBeDefined(); + expect(Array.isArray(firstContact.users)).toBe(true); + expect(firstContact.users.length).toBe(1); + + const firstUser = firstContact.users?.[0]; + if (firstUser) { + // All user fields should be present (no select was used) + expectTypeOf(firstUser).toHaveProperty("id"); + expectTypeOf(firstUser).toHaveProperty("name"); + expectTypeOf(firstUser).toHaveProperty("id_customer"); + expectTypeOf(firstUser).toHaveProperty("user_customer"); + + // Verify runtime data exists + expect(firstUser.id).toBe("1A269FA3-82E6-465A-94FA-39EE3F2F9B5D"); + expect(firstUser.name).toBe("Test User"); + expect(firstUser.id_customer).toBe( + "3026B56E-0C6E-4F31-B666-EE8AC5B36542", + ); + expect(firstUser.user_customer).toBeDefined(); + expect(Array.isArray(firstUser.user_customer)).toBe(true); + expect(firstUser.user_customer.length).toBe(1); + + // Verify nested customer data with selected fields only + const firstCustomer = firstUser.user_customer?.[0]; + if (firstCustomer) { + // Only 'name' was selected in nested expand + expectTypeOf(firstCustomer).toHaveProperty("name"); + // @ts-expect-error - address was not selected, should not be in type + expectTypeOf(firstCustomer.address).toBeNever(); + // @ts-expect-error - tier was not selected, should not be in type + expectTypeOf(firstCustomer.tier).toBeNever(); + + expect(firstCustomer.name).toBe("test"); + } + } + + // Check second contact which has a different user structure + const secondContact = result.data?.[1]; + if (secondContact) { + expect(secondContact.name).toBe("Adam"); + 
expect(secondContact.hobby).toBe("trees"); + expect(secondContact.users).toBeDefined(); + expect(secondContact.users.length).toBe(1); + + const secondUser = secondContact.users?.[0]; + if (secondUser) { + expect(secondUser.id).toBe("53D36C9A-8F90-4C21-A38F-F278D4F77718"); + expect(secondUser.name).toBe("adam user"); + expect(secondUser.id_customer).toBeNull(); + // This user has no customer, should be empty array + expect(secondUser.user_customer).toEqual([]); + } + } + } }); it("should generate query string with nested $expand", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => + .expand(users, (b) => b - .select("username") - .expand("user_customer", (nested) => nested.select("name")), + .select({ username: users.username }) + .expand(userCustomer, (nested) => + nested.select({ name: userCustomer.name }), + ), ) .getQueryString(); @@ -295,12 +492,12 @@ describe("Expand API Specification", () => { it("should support deeply nested expands (3 levels)", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => - b.expand("user_customer", (nested) => + .expand(users, (b) => + b.expand(userCustomer, (nested) => // If customer had relations, we could expand further - nested.select("name"), + nested.select({ name: userCustomer.name }), ), ) .getQueryString(); @@ -312,15 +509,17 @@ describe("Expand API Specification", () => { describe("Complex combinations", () => { it("should support select + filter + orderBy + nested expand", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("users", (b) => + .expand(users, (b) => b - .select("username", "email") - .filter({ active: true }) + .select({ username: users.username, email: users.email }) + .where(eq(users.active, 1)) .orderBy("username") .top(10) - .expand("user_customer", (nested) => nested.select("name")), + .expand(userCustomer, (nested) => + nested.select({ name: userCustomer.name }), + 
), ) .getQueryString(); @@ -334,48 +533,27 @@ describe("Expand API Specification", () => { it("should support multiple expands with different options", () => { const queryString = db - .from("contacts") - .list() - .expand("users", (b) => b.select("username").filter({ active: true })) - .expand("other_users", (b) => b.select("email").top(5)) - .getQueryString(); - - expect(queryString).toBe( - "/contacts?$top=1000&$expand=users($select=username;$filter=active eq true),other_users($select=email;$top=5)", - ); - }); - }); - - describe("Arbitrary relations (string escape hatch)", () => { - it("should allow expanding arbitrary relations not in schema", () => { - const queryString = db - .from("contacts") + .from(contacts) .list() - .expand("unknown_relation", (b) => b.select("arbitrary_field")) + .expand(users, (b) => + b.select({ username: users.username }).where(eq(users.active, 1)), + ) + .expand(otherUsers, (b) => b.select({ email: otherUsers.email }).top(5)) .getQueryString(); expect(queryString).toBe( - "/contacts?$top=1000&$expand=unknown_relation($select=arbitrary_field)", + "/contacts?$top=1000&$expand=users($select=username;$filter=active eq 1),other_users($select=email;$top=5)", ); }); - - it("should type arbitrary relation callback generically", () => { - db.from("contacts") - .list() - .expand("unknown", (builder) => { - // Should allow arbitrary field names - return builder.select("any_field" as any); - }); - }); }); describe("Integration with existing query methods", () => { it("should work with select on parent query", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .select("name", "hobby") - .expand("users", (b) => b.select("username")) + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b) => b.select({ username: users.username })) .getQueryString(); expect(queryString).toContain("$select=name,hobby"); @@ -384,10 +562,10 @@ describe("Expand API Specification", () => { it("should work with filter 
on parent query", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .filter({ name: { eq: "Eric" } }) - .expand("users") + .where(eq(contacts.name, "Eric")) + .expand(users) .getQueryString(); expect(queryString).toContain("$filter=name eq"); @@ -396,12 +574,12 @@ describe("Expand API Specification", () => { it("should work with orderBy, top, skip on parent query", () => { const queryString = db - .from("contacts") + .from(contacts) .list() .orderBy("name") .top(20) .skip(10) - .expand("users", (b) => b.select("username")) + .expand(users, (b) => b.select({ username: users.username })) .getQueryString(); expect(queryString).toContain("$orderby=name"); diff --git a/packages/fmodata/tests/field-id-transforms.test.ts b/packages/fmodata/tests/field-id-transforms.test.ts index 076a70fc..aaf9d9ae 100644 --- a/packages/fmodata/tests/field-id-transforms.test.ts +++ b/packages/fmodata/tests/field-id-transforms.test.ts @@ -11,9 +11,13 @@ */ import { describe, it, expect, beforeEach } from "vitest"; -import { createMockClient } from "./utils/test-setup"; -import { occurrencesWithIds } from "./utils/test-setup"; +import { + createMockClient, + contactsTOWithIds, + usersTOWithIds, +} from "./utils/test-setup"; import { simpleMock } from "./utils/mock-fetch"; +import { eq } from "@proofkit/fmodata"; describe("Field ID Transformation", () => { let capturedRequests: Array<{ url: string; options: any }> = []; @@ -26,7 +30,7 @@ describe("Field ID Transformation", () => { it("should send request with FMFIDs and FMTID", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { @@ -44,11 +48,15 @@ describe("Field ID Transformation", () => { }; await db - .from("users") + .from(usersTOWithIds) .list() - .select("id", "name", "active") + .select({ + id: usersTOWithIds.id, + name: usersTOWithIds.name, + active: usersTOWithIds.active, + 
}) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -67,9 +75,7 @@ describe("Field ID Transformation", () => { it("should transform FMFID response keys back to field names", async () => { const connection = createMockClient(); - const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, - }); + const db = connection.database("test.fmp12"); const mockResponse = { "@context": "https://api.example.com/$metadata#users", @@ -94,11 +100,15 @@ describe("Field ID Transformation", () => { }; const result = await db - .from("users") + .from(usersTOWithIds) .list() - .select("id", "name", "active") + .select({ + id: usersTOWithIds.id, + name: usersTOWithIds.name, + active: usersTOWithIds.active, + }) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -124,18 +134,18 @@ describe("Field ID Transformation", () => { it("should transform field names to FMFIDs in filter", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { value: [] }; await db - .from("users") + .from(usersTOWithIds) .list() - .select("id", "name") - .filter({ active: { eq: true } }) + .select({ id: usersTOWithIds.id, name: usersTOWithIds.name }) + .where(eq(usersTOWithIds.active, true)) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? 
input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -145,7 +155,7 @@ describe("Field ID Transformation", () => { // Verify filter uses FMFID for the field name const request = capturedRequests[0]!; expect(decodeURIComponent(request.url)).toContain("FMFID:7"); // active field in filter - expect(request.url).toContain("eq%20true"); + expect(request.url).toContain("eq%201"); }); }); @@ -153,18 +163,18 @@ describe("Field ID Transformation", () => { it("should transform field names to FMFIDs in orderBy", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { value: [] }; await db - .from("users") + .from(usersTOWithIds) .list() - .select("id", "name") + .select({ id: usersTOWithIds.id, name: usersTOWithIds.name }) .orderBy(["name", "desc"]) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -181,7 +191,7 @@ describe("Field ID Transformation", () => { it("should use FMTID in URL", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { @@ -193,10 +203,10 @@ describe("Field ID Transformation", () => { }; await db - .from("users") + .from(usersTOWithIds) .get("550e8400-e29b-41d4-a716-446655440001") .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? 
input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -213,7 +223,7 @@ describe("Field ID Transformation", () => { it("should transform response field IDs back to names", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { @@ -232,10 +242,10 @@ describe("Field ID Transformation", () => { }; const result = await db - .from("users") + .from(usersTOWithIds) .get("550e8400-e29b-41d4-a716-446655440001") .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -255,7 +265,7 @@ describe("Field ID Transformation", () => { it("should transform field names to FMFIDs in request body", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { @@ -274,12 +284,12 @@ describe("Field ID Transformation", () => { let capturedBody: any; const result = await db - .from("users") + .from(usersTOWithIds) .insert({ name: "Charlie", active: true, fake_field: "test", - } as any) // Cast to bypass required field validation in tests + }) .execute({ fetchHandler: async (input, init) => { let url = input instanceof Request ? 
input.url : input.toString(); @@ -303,7 +313,7 @@ describe("Field ID Transformation", () => { // Check that the body has FMFIDs (not field names) expect(capturedBody).toMatchObject({ "FMFID:6": "Charlie", // name - "FMFID:7": true, // active + "FMFID:7": 1, // active (number field, 1 = true) "FMFID:8": "test", // fake_field }); }); @@ -311,7 +321,7 @@ describe("Field ID Transformation", () => { it("should transform response field IDs back to names", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { @@ -330,12 +340,12 @@ describe("Field ID Transformation", () => { }; const result = await db - .from("users") + .from(usersTOWithIds) .insert({ name: "Charlie", active: true, fake_field: "test", - } as any) + }) .execute({ fetchHandler: async (input, init) => { const url = input instanceof Request ? input.url : input.toString(); @@ -356,12 +366,12 @@ describe("Field ID Transformation", () => { it("should transform field names to FMFIDs in update body", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); let capturedBody: any; await db - .from("users") + .from(usersTOWithIds) .update({ name: "Alice Updated", active: false, @@ -390,7 +400,7 @@ describe("Field ID Transformation", () => { // Check that the body has FMFIDs (not field names) expect(capturedBody).toMatchObject({ "FMFID:6": "Alice Updated", // name - "FMFID:7": false, // active + "FMFID:7": 0, // active (number field, 0 = false) }); }); }); @@ -399,17 +409,19 @@ describe("Field ID Transformation", () => { it("should use FMFIDs for expanded relation fields", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { value: [] }; await db - 
.from("contacts") + .from(contactsTOWithIds) .list() - .expand("users", (b) => b.select("id", "name")) + .expand(usersTOWithIds, (b: any) => + b.select({ id: usersTOWithIds.id, name: usersTOWithIds.name }), + ) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -426,7 +438,7 @@ describe("Field ID Transformation", () => { it("should transform expanded relation response fields back to names", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { @@ -464,11 +476,13 @@ describe("Field ID Transformation", () => { }; const result = await db - .from("contacts") + .from(contactsTOWithIds) .list() - .expand("users", (b) => b.select("id", "name")) + .expand(usersTOWithIds, (b: any) => + b.select({ id: usersTOWithIds.id, name: usersTOWithIds.name }), + ) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? 
input.url : input.toString(); capturedRequests.push({ url, options: init }); return simpleMock({ body: mockResponse, status: 200 })(input, init); @@ -507,17 +521,17 @@ describe("Field ID Transformation", () => { it("should include 'Prefer: fmodata.entity-ids' header when using entity IDs", async () => { const connection = createMockClient(); const db = connection.database("test.fmp12", { - occurrences: occurrencesWithIds, + useEntityIds: true, }); const mockResponse = { value: [] }; await db - .from("users") + .from(usersTOWithIds) .list() - .select("id", "name") + .select({ id: usersTOWithIds.id, name: usersTOWithIds.name }) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const url = input instanceof Request ? input.url : input.toString(); const headers = (init as RequestInit)?.headers as Record< string, diff --git a/packages/fmodata/tests/filters.test.ts b/packages/fmodata/tests/filters.test.ts index 02b496cb..2e5d439e 100644 --- a/packages/fmodata/tests/filters.test.ts +++ b/packages/fmodata/tests/filters.test.ts @@ -13,153 +13,190 @@ * 3. 
The mock fetch will automatically match the request URL to the stored response */ -import { describe, it, expect } from "vitest"; -import { occurrences, createMockClient } from "./utils/test-setup"; +import { describe, it, expect, expectTypeOf } from "vitest"; +import { + eq, + ne, + gt, + gte, + lt, + lte, + contains, + startsWith, + endsWith, + inArray, + notInArray, + and, + or, + isNull, + isNotNull, + fmTableOccurrence, + textField, +} from "@proofkit/fmodata"; +import { + createMockClient, + users, + contacts, + usersTOWithIds, +} from "./utils/test-setup"; +import { z } from "zod/v4"; describe("Filter Tests", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12"); it("should enforce correct operator types for each field type", () => { - // ✅ String operators (single operator object) + // ✅ String operators const stringQuery = db - .from("contacts") + .from(contacts) .list() - .filter({ name: { eq: "John" } }); + .where(eq(contacts.name, "John")); expect(stringQuery.getQueryString()).toBe( "/contacts?$filter=name eq 'John'&$top=1000", ); - // ✅ String operators (array syntax also works) - const stringQueryArray = db - .from("contacts") - .list() - .filter({ name: [{ eq: "John" }] }); - expect(stringQueryArray.getQueryString()).toBe( - "/contacts?$filter=name eq 'John'&$top=1000", - ); - - // ✅ Boolean operators (single operator object) - const boolQuery = db - .from("users") - .list() - .filter({ active: { eq: true } }); + // ✅ Boolean operators + // Note: active field has a writeValidator that converts boolean to number (1/0) + const boolQuery = db.from(users).list().where(eq(users.active, true)); expect(boolQuery.getQueryString()).toBe( - "/users?$filter=active eq true&$top=1000", + "/users?$filter=active eq 1&$top=1000", ); }); - it("should support shorthand syntax", () => { - const query = db.from("contacts").list().filter({ name: 
"John" }); + it("should support equality operator", () => { + const query = db.from(contacts).list().where(eq(contacts.name, "John")); expect(query.getQueryString()).toBe( "/contacts?$filter=name eq 'John'&$top=1000", ); }); - it("should support multiple operators on same field (implicit AND)", () => { + it("should support multiple conditions with AND", () => { const query = db - .from("contacts") + .from(contacts) .list() - .filter({ name: [{ eq: "John" }, { ne: "Jane" }] }); + .where(and(eq(contacts.name, "John"), ne(contacts.name, "Jane"))); expect(query.getQueryString()).toContain("name eq 'John'"); expect(query.getQueryString()).toContain("and"); }); it("should support string operators", () => { - // Single operator object syntax + // Contains operator const containsQuery = db - .from("contacts") + .from(contacts) .list() - .filter({ name: { contains: "John" } }); + .where(contains(contacts.name, "John")); expect(containsQuery.getQueryString()).toContain("contains"); + // Starts with operator const startsWithQuery = db - .from("contacts") + .from(contacts) .list() - .filter({ name: { startswith: "J" } }); + .where(startsWith(contacts.name, "J")); expect(startsWithQuery.getQueryString()).toContain("startswith"); + // Ends with operator const endsWithQuery = db - .from("contacts") + .from(contacts) .list() - .filter({ name: { endswith: "n" } }); + .where(endsWith(contacts.name, "n")); expect(endsWithQuery.getQueryString()).toContain("endswith"); - - // Array syntax also works - const containsQueryArray = db - .from("contacts") - .list() - .filter({ name: [{ contains: "John" }] }); - expect(containsQueryArray.getQueryString()).toContain("contains"); }); it("should support logical operators", () => { const query = db - .from("users") + .from(users) .list() - .filter({ - and: [{ name: [{ contains: "John" }] }, { active: [{ eq: true }] }], - }); + .where(and(contains(users.name, "John"), eq(users.active, true))); expect(query.getQueryString()).toContain("contains"); 
expect(query.getQueryString()).toContain("and"); }); it("should support or operator", () => { const query = db - .from("users") + .from(users) .list() - .filter({ - or: [{ name: [{ eq: "John" }] }, { name: [{ eq: "Jane" }] }], - }); + .where(or(eq(users.name, "John"), eq(users.name, "Jane"))); expect(query.getQueryString()).toContain("or"); }); it("should support in operator", () => { const query = db - .from("contacts") + .from(contacts) .list() - .filter({ name: [{ in: ["John", "Jane", "Bob"] }] }); - expect(query.getQueryString()).toContain("in"); + .where(inArray(contacts.name, ["John", "Jane", "Bob"])); + + const queryString = query.getQueryString(); + expect(queryString).toContain("in"); + expect(queryString).toContain("$filter=name in ('John', 'Jane', 'Bob')"); + + const specialTable = fmTableOccurrence( + "special_table", + { + id: textField().primaryKey(), + name: textField(), + }, + { defaultSelect: "all" }, + ); + + const query2 = db + .from(specialTable) + .list() + .where(inArray(specialTable.id, ["John", "Jane", "Bob"])); + + const queryString2 = query2.getQueryString(); + expect(queryString2).toContain("in"); + expect(queryString2).toContain(`$filter="id" in ('John', 'Jane', 'Bob')`); }); it("should support null values", () => { - const query = db - .from("users") - .list() - .filter({ name: [{ eq: null }] }); + const query = db.from(users).list().where(isNull(users.name)); expect(query.getQueryString()).toContain("null"); }); - it("should support raw string filters as escape hatch", () => { - const query = db.from("users").list().filter("substringof('John', name)"); - expect(query.getQueryString()).toBe( - "/users?$filter=substringof('John', name)&$top=1000", + it("should properly escape or quote field names in filters", () => { + /** + * From the FileMaker docs: + * Enclose field names that include special characters, such as spaces or underscores, in double-quotation marks. 
+ */ + const weirdTable = fmTableOccurrence( + "weird_table", + { + id: textField().primaryKey(), + "name with spaces": textField(), + }, + { defaultSelect: "all" }, + ); + const query = db + .from(weirdTable) + .list() + .where(eq(weirdTable["name with spaces"], "John")); + expect(query.getQueryString()).toContain( + "$filter=\"name with spaces\" eq 'John'", ); + + const query2 = db.from(weirdTable).list().where(eq(weirdTable.id, "John")); + expect(query2.getQueryString()).toContain(`$filter="id" eq 'John'`); }); it("should support complex nested filters", () => { const query = db - .from("users") - .list() - .filter({ - and: [ - { - or: [{ name: [{ eq: "John" }] }, { name: [{ eq: "Jane" }] }], - }, - { active: [{ eq: true }] }, - ], - }); + .from(users) + .list() + .where( + and( + or(eq(users.name, "John"), eq(users.name, "Jane")), + eq(users.active, true), + ), + ); expect(query.getQueryString()).toContain("or"); expect(query.getQueryString()).toContain("and"); }); it("should combine $count with filter", () => { const queryString = db - .from("users") + .from(users) .list() - .filter({ active: { eq: true } }) + .where(eq(users.active, true)) .count() .getQueryString(); @@ -169,10 +206,10 @@ describe("Filter Tests", () => { it("should combine $select and $filter", () => { const queryString = db - .from("users") + .from(users) .list() - .select("name", "id") - .filter({ active: { eq: true } }) + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) .getQueryString(); expect(queryString).toContain("$select"); @@ -183,10 +220,10 @@ describe("Filter Tests", () => { it("should combine $select, $filter, and $orderby", () => { const queryString = db - .from("users") + .from(users) .list() - .select("name", "id") - .filter({ active: { eq: true } }) + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) .orderBy("name") .getQueryString(); @@ -197,10 +234,10 @@ describe("Filter Tests", () => { it("should combine multiple 
query parameters", () => { const queryString = db - .from("users") + .from(users) .list() - .select("name", "id") - .filter({ active: { eq: true } }) + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) .orderBy("name") .top(10) .skip(0) @@ -215,13 +252,13 @@ describe("Filter Tests", () => { it("should combine $select, $filter, $orderby, $top, and $expand", () => { const queryString = db - .from("users") + .from(users) .list() - .select("name", "id") - .filter({ active: { eq: true } }) + .select({ name: users.name, id: users.id }) + .where(eq(users.active, true)) .orderBy("name") .top(25) - .expand("contacts") + .expand(contacts) .getQueryString(); expect(queryString).toContain("$select"); @@ -233,9 +270,9 @@ describe("Filter Tests", () => { it("should generate query string with single() and filter", () => { const queryString = db - .from("users") + .from(users) .list() - .filter({ id: { eq: "123" } }) + .where(eq(users.id, "123")) .single() .getQueryString(); @@ -245,10 +282,10 @@ describe("Filter Tests", () => { it("should use & to separate multiple parameters", () => { const queryString = db - .from("users") + .from(users) .list() - .select("name") - .filter({ active: { eq: true } }) + .select({ name: users.name }) + .where(eq(users.active, true)) .top(10) .getQueryString(); @@ -259,13 +296,255 @@ describe("Filter Tests", () => { it("should URL encode special characters in filter values", () => { const queryString = db - .from("contacts") + .from(contacts) .list() - .filter({ name: { eq: "John & Jane" } }) + .where(eq(contacts.name, "John & Jane")) .getQueryString(); expect(queryString).toContain("$filter"); // Special characters should be properly encoded expect(queryString).toBeDefined(); }); + + it("should use entity IDs when enabled", () => { + const queryString = db + .from(usersTOWithIds) + .list() + .where(eq(usersTOWithIds.id, "123")) + .getQueryString(); + + expect(queryString).toContain("$filter"); + 
expect(queryString).toContain("FMFID"); + + const dbWithIds = createMockClient().database("fmdapi_test.fmp12", { + useEntityIds: true, + }); + + const queryStringWithIds = dbWithIds + .from(usersTOWithIds) + .list() + .where(eq(usersTOWithIds.id, "123")) + .getQueryString(); + + expect(queryStringWithIds).toContain("$filter"); + expect(queryStringWithIds).toContain("FMFID"); + }); + + // it("should not allow filter on the wrong table", ()=>{}) + + it("should use the write validator for all operations", () => { + const testTable = fmTableOccurrence( + "test", + { + text: textField().primaryKey(), + textNumber: textField().writeValidator(z.number().transform(toString)), + enum: textField().writeValidator(z.enum(["a", "b", "c"])), + transform: textField().writeValidator( + z.string().transform(() => "static-value"), + ), + }, + { useEntityIds: false }, + ); + + // ------------------ Test eq (equal) operator ------------------ + // @ts-expect-error - should not allow number + eq(testTable.text, 1); // text field + + // @ts-expect-error - should not allow string + eq(testTable.textNumber, "1"); // text field + eq(testTable.textNumber, 1); // number field + + eq(testTable.enum, "a"); // enum field + // @ts-expect-error - should not allow invalid enum value + eq(testTable.enum, "d"); + + // ------------------ Test ne (not equal) operator ------------------ + // @ts-expect-error - should not allow number + ne(testTable.text, 1); + // @ts-expect-error - should not allow string + ne(testTable.textNumber, "1"); + ne(testTable.textNumber, 1); + ne(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + ne(testTable.enum, "d"); + + // ------------------ Test gt (greater than) operator ------------------ + // @ts-expect-error - should not allow number + gt(testTable.text, 1); + // @ts-expect-error - should not allow string + gt(testTable.textNumber, "1"); + gt(testTable.textNumber, 1); + gt(testTable.enum, "a"); + // @ts-expect-error - should not 
allow invalid enum value + gt(testTable.enum, "d"); + + // ------------------ Test gte (greater than or equal) operator ------------------ + // @ts-expect-error - should not allow number + gte(testTable.text, 1); + // @ts-expect-error - should not allow string + gte(testTable.textNumber, "1"); + gte(testTable.textNumber, 1); + gte(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + gte(testTable.enum, "d"); + + // ------------------ Test lt (less than) operator ------------------ + // @ts-expect-error - should not allow number + lt(testTable.text, 1); + // @ts-expect-error - should not allow string + lt(testTable.textNumber, "1"); + lt(testTable.textNumber, 1); + lt(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + lt(testTable.enum, "d"); + + // ------------------ Test lte (less than or equal) operator ------------------ + // @ts-expect-error - should not allow number + lte(testTable.text, 1); + // @ts-expect-error - should not allow string + lte(testTable.textNumber, "1"); + lte(testTable.textNumber, 1); + lte(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + lte(testTable.enum, "d"); + + // ------------------ Test contains operator ------------------ + // @ts-expect-error - should not allow number + contains(testTable.text, 1); + // @ts-expect-error - should not allow string + contains(testTable.textNumber, "1"); + contains(testTable.textNumber, 1); + contains(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + contains(testTable.enum, "d"); + + // ------------------ Test startsWith operator ------------------ + // @ts-expect-error - should not allow number + startsWith(testTable.text, 1); + // @ts-expect-error - should not allow string + startsWith(testTable.textNumber, "1"); + startsWith(testTable.textNumber, 1); + startsWith(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + 
startsWith(testTable.enum, "d"); + + // ------------------ Test endsWith operator ------------------ + // @ts-expect-error - should not allow number + endsWith(testTable.text, 1); + // @ts-expect-error - should not allow string + endsWith(testTable.textNumber, "1"); + endsWith(testTable.textNumber, 1); + endsWith(testTable.enum, "a"); + // @ts-expect-error - should not allow invalid enum value + endsWith(testTable.enum, "d"); + + // ------------------ Test inArray operator ------------------ + // @ts-expect-error - should not allow number array + inArray(testTable.text, [1, 2]); + // @ts-expect-error - should not allow string array + inArray(testTable.textNumber, ["1", "2"]); + inArray(testTable.textNumber, [1, 2]); + inArray(testTable.enum, ["a", "b"]); + // @ts-expect-error - should not allow invalid enum values + inArray(testTable.enum, ["d", "e"]); + + // ------------------ Test notInArray operator ------------------ + // @ts-expect-error - should not allow number array + notInArray(testTable.text, [1, 2]); + // @ts-expect-error - should not allow string array + notInArray(testTable.textNumber, ["1", "2"]); + notInArray(testTable.textNumber, [1, 2]); + notInArray(testTable.enum, ["a", "b"]); + // @ts-expect-error - should not allow invalid enum values + notInArray(testTable.enum, ["d", "e"]); + + // Test that write validators are used for all operators + const queryStringEq = db + .from(testTable) + .list() + .where(eq(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringEq).toContain("$filter"); + expect(queryStringEq).toContain("static-value"); + + const queryStringNe = db + .from(testTable) + .list() + .where(ne(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringNe).toContain("$filter"); + expect(queryStringNe).toContain("static-value"); + + const queryStringGt = db + .from(testTable) + .list() + .where(gt(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringGt).toContain("$filter"); + 
expect(queryStringGt).toContain("static-value"); + + const queryStringGte = db + .from(testTable) + .list() + .where(gte(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringGte).toContain("$filter"); + expect(queryStringGte).toContain("static-value"); + + const queryStringLt = db + .from(testTable) + .list() + .where(lt(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringLt).toContain("$filter"); + expect(queryStringLt).toContain("static-value"); + + const queryStringLte = db + .from(testTable) + .list() + .where(lte(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringLte).toContain("$filter"); + expect(queryStringLte).toContain("static-value"); + + const queryStringContains = db + .from(testTable) + .list() + .where(contains(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringContains).toContain("$filter"); + expect(queryStringContains).toContain("static-value"); + + const queryStringStartsWith = db + .from(testTable) + .list() + .where(startsWith(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringStartsWith).toContain("$filter"); + expect(queryStringStartsWith).toContain("static-value"); + + const queryStringEndsWith = db + .from(testTable) + .list() + .where(endsWith(testTable.transform, "anything")) + .getQueryString(); + expect(queryStringEndsWith).toContain("$filter"); + expect(queryStringEndsWith).toContain("static-value"); + + const queryStringInArray = db + .from(testTable) + .list() + .where(inArray(testTable.transform, ["anything"])) + .getQueryString(); + expect(queryStringInArray).toContain("$filter"); + expect(queryStringInArray).toContain("static-value"); + + const queryStringNotInArray = db + .from(testTable) + .list() + .where(notInArray(testTable.transform, ["anything"])) + .getQueryString(); + expect(queryStringNotInArray).toContain("$filter"); + expect(queryStringNotInArray).toContain("static-value"); + }); }); diff --git 
a/packages/fmodata/tests/fixtures/metadata.xml b/packages/fmodata/tests/fixtures/metadata.xml new file mode 100644 index 00000000..e01fee57 --- /dev/null +++ b/packages/fmodata/tests/fixtures/metadata.xml @@ -0,0 +1,19965 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + 
+ Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + 
+ + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + 
+ + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read 
+ + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + 
+ + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + 
+ + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + 
+ + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read 
+ + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + 
+ + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + 
Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Core.V1.Permission/Read + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Org.OData.Capabilities.V1.ConformanceLevelType/Intermediate + + + + + application/json;metadata=full;IEEE754Compatible=false;streaming=true + + application/json;metadata=minimal;IEEE754Compatible=false;streaming=true + + application/json;metadata=none;IEEE754Compatible=false;streaming=true + + + + + + + + contains + endswith + startswith + length + indexof + substring + tolower + toupper + trim + concat + year + month + day + hour + minute + second + fractionalseconds + date + time + totaloffsetminutes + totalseconds + now + mindatetime + maxdatetime + round + floor + ceiling + cast + + + + + + + Org.OData.Aggregation.V1.Aggregate + Org.OData.Aggregation.V1.GroupBy + + + + + + + \ No newline at end of file diff --git a/packages/fmodata/tests/fixtures/responses.ts b/packages/fmodata/tests/fixtures/responses.ts index 6612fd63..9b9aa9b5 100644 --- a/packages/fmodata/tests/fixtures/responses.ts +++ b/packages/fmodata/tests/fixtures/responses.ts @@ -162,6 +162,21 @@ export const mockResponses = { "@context": "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", value: [ + { + "@id": + 
"https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + PrimaryKey: "00000000-0000-0000-0000-000000000000", + CreationTimestamp: "2025-12-05T16:36:53Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-05T16:36:53Z", + ModifiedBy: "admin", + name: null, + hobby: "Should fail", + id_user: null, + my_calc: "you betcha", + }, { "@id": "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", @@ -179,61 +194,46 @@ export const mockResponses = { }, { "@id": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('015EEF6F-F63E-4C4C-95A7-3CAC3A1C59E2')", - "@editLink": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('015EEF6F-F63E-4C4C-95A7-3CAC3A1C59E2')", - PrimaryKey: "015EEF6F-F63E-4C4C-95A7-3CAC3A1C59E2", - CreationTimestamp: "2025-11-18T20:33:35Z", - CreatedBy: "admin", - ModificationTimestamp: "2025-11-18T20:33:35Z", - ModifiedBy: "admin", - name: "Batch Test User", - hobby: "Testing", - id_user: null, - my_calc: "you betcha", - }, - { - "@id": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('20994564-4AA2-47DA-91C3-26CA273E682D')", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('1FE5EFB1-E42D-4AC1-94BF-9AA6AD11F9CE')", "@editLink": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('20994564-4AA2-47DA-91C3-26CA273E682D')", - PrimaryKey: "20994564-4AA2-47DA-91C3-26CA273E682D", - CreationTimestamp: "2025-11-18T20:33:45Z", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('1FE5EFB1-E42D-4AC1-94BF-9AA6AD11F9CE')", + PrimaryKey: "1FE5EFB1-E42D-4AC1-94BF-9AA6AD11F9CE", + CreationTimestamp: "2025-12-05T16:35:10Z", CreatedBy: "admin", - ModificationTimestamp: "2025-11-18T20:33:45Z", + ModificationTimestamp: "2025-12-05T16:35:10Z", ModifiedBy: 
"admin", - name: "Batch Test User", - hobby: "Testing", + name: "After Delete Fail - 1764974109900", + hobby: "Should this succeed?", id_user: null, my_calc: "you betcha", }, { "@id": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('CF1D28C9-FA7A-4104-9FE1-29A4FD810BF5')", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D17802D1-7A37-494E-BE57-408129E0B251')", "@editLink": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('CF1D28C9-FA7A-4104-9FE1-29A4FD810BF5')", - PrimaryKey: "CF1D28C9-FA7A-4104-9FE1-29A4FD810BF5", - CreationTimestamp: "2025-11-18T20:33:58Z", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D17802D1-7A37-494E-BE57-408129E0B251')", + PrimaryKey: "D17802D1-7A37-494E-BE57-408129E0B251", + CreationTimestamp: "2025-12-05T16:36:21Z", CreatedBy: "admin", - ModificationTimestamp: "2025-11-18T20:33:58Z", + ModificationTimestamp: "2025-12-05T16:36:21Z", ModifiedBy: "admin", - name: "Batch Test User", - hobby: "Testing", + name: "After Delete Fail - 1764974181090", + hobby: "Should this succeed?", id_user: null, my_calc: "you betcha", }, { "@id": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('BD9C8A25-7173-4378-8BA0-AAADA67C6F4D')", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('593F9FCC-D71C-42A9-B9DF-AAF1B36C7D84')", "@editLink": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('BD9C8A25-7173-4378-8BA0-AAADA67C6F4D')", - PrimaryKey: "BD9C8A25-7173-4378-8BA0-AAADA67C6F4D", - CreationTimestamp: "2025-11-18T20:34:37Z", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('593F9FCC-D71C-42A9-B9DF-AAF1B36C7D84')", + PrimaryKey: "593F9FCC-D71C-42A9-B9DF-AAF1B36C7D84", + CreationTimestamp: "2025-12-05T16:36:53Z", CreatedBy: "admin", - ModificationTimestamp: "2025-11-18T20:34:37Z", + ModificationTimestamp: "2025-12-05T16:36:53Z", ModifiedBy: "admin", - name: "Batch Test User", - hobby: "Testing", + name: "After Delete 
Fail - 1764974213190", + hobby: "Should this succeed?", id_user: null, my_calc: "you betcha", }, @@ -293,7 +293,7 @@ export const mockResponses = { headers: { "content-type": "application/json;charset=utf-8", location: - "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts(ROWID=7619)", + "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts(ROWID=11073)", }, response: null, }, @@ -305,19 +305,19 @@ export const mockResponses = { headers: { "content-type": "application/json;charset=utf-8", location: - "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts('E15FE132-4FE2-4548-A74F-FFB3F0283CD8')", + "https://acme-dev.ottomatic.cloud/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", }, response: { "@context": "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", "@id": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('E15FE132-4FE2-4548-A74F-FFB3F0283CD8')", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", "@editLink": - "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('E15FE132-4FE2-4548-A74F-FFB3F0283CD8')", - PrimaryKey: "E15FE132-4FE2-4548-A74F-FFB3F0283CD8", - CreationTimestamp: "2025-11-25T13:39:16Z", + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('F88124B8-53D1-482D-9EF9-08BA79702DA5')", + PrimaryKey: "F88124B8-53D1-482D-9EF9-08BA79702DA5", + CreationTimestamp: "2025-12-15T11:32:53Z", CreatedBy: "admin", - ModificationTimestamp: "2025-11-25T13:39:16Z", + ModificationTimestamp: "2025-12-15T11:32:53Z", ModifiedBy: "admin", name: "Capture test", hobby: null, @@ -387,15 +387,26 @@ export const mockResponses = { "error-invalid-record-id": { url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", method: "GET", - status: 404, + status: 200, headers: { "content-type": 
"application/json;charset=utf-8", }, response: { - error: { - code: "-1023", - message: "Specified record not found", - }, + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts/$entity", + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('00000000-0000-0000-0000-000000000000')", + PrimaryKey: "00000000-0000-0000-0000-000000000000", + CreationTimestamp: "2025-12-05T16:36:53Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-12-05T16:36:53Z", + ModifiedBy: "admin", + name: null, + hobby: "Should fail", + id_user: null, + my_calc: "you betcha", }, }, @@ -570,4 +581,89 @@ export const mockResponses = { ], }, }, + + "list with nested expand": { + url: "https://api.example.com/otto/fmi/odata/v4/fmdapi_test.fmp12/contacts?$top=2&$expand=users($expand=user_customer($select=name))", + method: "GET", + status: 200, + headers: { + "content-type": "application/json;charset=utf-8", + }, + response: { + "@context": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/$metadata#contacts", + value: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('B5BFBC89-03E0-47FC-ABB6-D51401730227')", + PrimaryKey: "B5BFBC89-03E0-47FC-ABB6-D51401730227", + CreationTimestamp: "2025-10-31T10:03:27Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:55:53Z", + ModifiedBy: "admin", + name: "Eric", + hobby: "Board games", + id_user: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + my_calc: "you betcha", + users: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + "@editLink": + 
"https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('1A269FA3-82E6-465A-94FA-39EE3F2F9B5D')", + id: "1A269FA3-82E6-465A-94FA-39EE3F2F9B5D", + CreationTimestamp: "2025-08-03T11:38:20Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-11-03T12:34:42Z", + ModifiedBy: "admin", + name: "Test User", + id_customer: "3026B56E-0C6E-4F31-B666-EE8AC5B36542", + user_customer: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/user_customer('3026B56E-0C6E-4F31-B666-EE8AC5B36542')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/user_customer('3026B56E-0C6E-4F31-B666-EE8AC5B36542')", + name: "test", + }, + ], + }, + ], + }, + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/contacts('D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4')", + PrimaryKey: "D61B338B-B06E-4985-ABFD-CB3B2EF4F4C4", + CreationTimestamp: "2025-10-31T11:13:13Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:56:07Z", + ModifiedBy: "admin", + name: "Adam", + hobby: "trees", + id_user: "53D36C9A-8F90-4C21-A38F-F278D4F77718", + my_calc: "you betcha", + users: [ + { + "@id": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('53D36C9A-8F90-4C21-A38F-F278D4F77718')", + "@editLink": + "https://api.example.com/fmi/odata/v4/fmdapi_test.fmp12/users('53D36C9A-8F90-4C21-A38F-F278D4F77718')", + id: "53D36C9A-8F90-4C21-A38F-F278D4F77718", + CreationTimestamp: "2025-10-31T15:55:56Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-10-31T15:56:03Z", + ModifiedBy: "admin", + name: "adam user", + id_customer: null, + user_customer: [], + }, + ], + }, + ], + }, + }, } satisfies MockResponses; diff --git a/packages/fmodata/tests/fixtures/sample-occurances.ts b/packages/fmodata/tests/fixtures/sample-occurances.ts index 065e92e2..b556b595 100644 --- a/packages/fmodata/tests/fixtures/sample-occurances.ts 
+++ b/packages/fmodata/tests/fixtures/sample-occurances.ts @@ -1,121 +1,79 @@ import { - defineBaseTable, - defineTableOccurrence, - buildOccurrences, -} from "../../src"; + fmTableOccurrence, + textField, + numberField, + dateField, + containerField, + timestampField, +} from "@proofkit/fmodata"; import { z } from "zod/v4"; // ============================================================================ -// Phase 1: Define all TableOccurrences (without navigation) +// Define all TableOccurrences with navigationPaths // ============================================================================ -const _Addresses = defineTableOccurrence({ - fmtId: "FMTID:1065109", - name: "Addresses", - baseTable: defineBaseTable({ - schema: { - "ADDRESS code": z.number().nullable(), - "ADDRESS name": z.string().nullable(), - "ADDRESS address": z.string(), // Key field - never null - "ADDRESS city": z.string().nullable(), - "ADDRESS state": z.string().nullable(), - "ADDRESS zip": z.string().nullable(), - full_address: z.string().nullable(), - search_address: z.string().nullable(), - created_date: z.string().nullable(), // Edm.Date - modified_date: z.string(), // Not marked as nullable in metadata - }, - idField: "ADDRESS address", - fmfIds: { - "ADDRESS code": "FMFID:4296032405", - "ADDRESS name": "FMFID:12885966997", - "ADDRESS address": "FMFID:17180934293", - "ADDRESS city": "FMFID:25770868885", - "ADDRESS state": "FMFID:30065836181", - "ADDRESS zip": "FMFID:34360803477", - full_address: "FMFID:38655770773", - search_address: "FMFID:51540672661", - created_date: "FMFID:120260149397", - modified_date: "FMFID:124555116693", - }, - readOnly: ["ADDRESS city", "full_address", "search_address"], // Calculation fields - }), -}); - -const _Builder_Contacts = defineTableOccurrence({ - fmtId: "FMTID:1065107", - name: "Builder_Contacts", - baseTable: defineBaseTable({ - schema: { - __pk_builder_contacts_id: z.string(), // Key field - never null - CreationTimestamp: z.string(), // 
DateTimeOffset, not nullable - CreatedBy: z.string(), // Not nullable - ModificationTimestamp: z.string(), // DateTimeOffset, not nullable - ModifiedBy: z.string(), // Not nullable - _fk_builder_id: z.string().nullable(), - First_name: z.string().nullable(), - Last_name: z.string().nullable(), - Email: z.string().nullable(), - web_portal_access: z.string().nullable(), - }, - idField: "__pk_builder_contacts_id", - fmfIds: { - __pk_builder_contacts_id: "FMFID:4296032403", - CreationTimestamp: "FMFID:8590999699", - CreatedBy: "FMFID:12885966995", - ModificationTimestamp: "FMFID:17180934291", - ModifiedBy: "FMFID:21475901587", - _fk_builder_id: "FMFID:25770868883", - First_name: "FMFID:30065836179", - Last_name: "FMFID:34360803475", - Email: "FMFID:47245705363", - web_portal_access: "FMFID:55835639955", - }, - }), -}); - -const _Tickets = defineTableOccurrence({ - fmtId: "FMTID:1065110", - name: "Tickets" as const, - baseTable: defineBaseTable({ - schema: { - STATIC_1: z.number().nullable(), - ticket_id: z.number().nullable(), - work_order_id: z.string().nullable(), - ticket_status: z.string().nullable(), - description: z.string().nullable(), - priority: z.number().nullable(), - due_date: z.string().nullable(), // Edm.Date - photo: z.string().nullable(), // Edm.Binary (base64 string) - created_timestamp: z.string().nullable(), // DateTimeOffset - }, - idField: "ticket_id", - fmfIds: { - STATIC_1: "FMFID:4296032406", - ticket_id: "FMFID:8590999702", - work_order_id: "FMFID:12885966998", - ticket_status: "FMFID:17180934294", - description: "FMFID:21475901590", - priority: "FMFID:25770868886", - due_date: "FMFID:30065836182", - photo: "FMFID:34360803478", - created_timestamp: "FMFID:38655770774", - }, - }), -}); +export const Addresses = fmTableOccurrence( + "Addresses", + { + "ADDRESS code": numberField().entityId("FMFID:4296032405"), + "ADDRESS name": textField().entityId("FMFID:12885966997"), + "ADDRESS address": textField().primaryKey().entityId("FMFID:17180934293"), 
// Key field - never null + "ADDRESS city": textField().readOnly().entityId("FMFID:25770868885"), + "ADDRESS state": textField().entityId("FMFID:30065836181"), + "ADDRESS zip": textField().entityId("FMFID:34360803477"), + full_address: textField().readOnly().entityId("FMFID:38655770773"), + search_address: textField().readOnly().entityId("FMFID:51540672661"), + created_date: dateField().entityId("FMFID:120260149397"), // Edm.Date + modified_date: dateField().notNull().entityId("FMFID:124555116693"), // Not marked as nullable in metadata + }, + { + entityId: "FMTID:1065109", + navigationPaths: ["Tickets"], + }, +); -// ============================================================================ -// Phase 2: Build final TableOccurrences with navigation relationships -// ============================================================================ +export const Builder_Contacts = fmTableOccurrence( + "Builder_Contacts", + { + __pk_builder_contacts_id: textField() + .primaryKey() + .entityId("FMFID:4296032403"), // Key field - never null + CreationTimestamp: timestampField().notNull().entityId("FMFID:8590999699"), // DateTimeOffset, not nullable + CreatedBy: textField().notNull().entityId("FMFID:12885966995"), // Not nullable + ModificationTimestamp: timestampField() + .notNull() + .entityId("FMFID:17180934291"), // DateTimeOffset, not nullable + ModifiedBy: textField().notNull().entityId("FMFID:21475901587"), // Not nullable + _fk_builder_id: textField().entityId("FMFID:25770868883"), + First_name: textField().entityId("FMFID:30065836179"), + Last_name: textField().entityId("FMFID:34360803475"), + Email: textField().entityId("FMFID:47245705363"), + web_portal_access: textField().entityId("FMFID:55835639955"), + }, + { + entityId: "FMTID:1065107", + navigationPaths: ["Addresses"], + }, +); -export const [Addresses, Builder_Contacts, Tickets] = buildOccurrences({ - occurrences: [_Addresses, _Builder_Contacts, _Tickets], - navigation: { - Addresses: ["Tickets"], - 
Builder_Contacts: ["Addresses"], - Tickets: ["Addresses"], +export const Tickets = fmTableOccurrence( + "Tickets", + { + STATIC_1: numberField().entityId("FMFID:4296032406"), + ticket_id: numberField().primaryKey().entityId("FMFID:8590999702"), + work_order_id: textField().entityId("FMFID:12885966998"), + ticket_status: textField().entityId("FMFID:17180934294"), + description: textField().entityId("FMFID:21475901590"), + priority: numberField().entityId("FMFID:25770868886"), + due_date: dateField().entityId("FMFID:30065836182"), // Edm.Date + photo: containerField().entityId("FMFID:34360803478"), // Edm.Binary (base64 string) + created_timestamp: timestampField().entityId("FMFID:38655770774"), // DateTimeOffset + }, + { + entityId: "FMTID:1065110", + navigationPaths: ["Addresses"], }, -}); +); // Export as array for use with database() export const occurrences = [Addresses, Builder_Contacts, Tickets]; diff --git a/packages/fmodata/tests/fmids-validation.test.ts b/packages/fmodata/tests/fmids-validation.test.ts index 6c3a56d2..6b87319c 100644 --- a/packages/fmodata/tests/fmids-validation.test.ts +++ b/packages/fmodata/tests/fmids-validation.test.ts @@ -9,339 +9,226 @@ import { describe, it, expect } from "vitest"; import { z } from "zod/v4"; -import { - defineBaseTable, - defineTableOccurrence, - buildOccurrences, -} from "../src/index"; -// Import classes directly for instanceof checks in tests -import { BaseTable } from "../src/client/base-table"; -import { TableOccurrence } from "../src/client/table-occurrence"; -import { - usersBaseWithIds, - contactsBaseWithIds, - occurrencesWithIds, - occurrences, - usersBase, - contactsBase, - createMockClient, -} from "./utils/test-setup"; +import { fmTableOccurrence, textField, FMTable } from "@proofkit/fmodata"; +import { createMockClient, users, contacts } from "./utils/test-setup"; describe("BaseTable with entity IDs", () => { - it("should create a BaseTable with fmfIds using defineBaseTable", () => { - const schema = { 
- id: z.string(), - name: z.string(), - email: z.string().nullable(), - }; - - const table = defineBaseTable({ - schema, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", - email: "FMFID:3", - }, - }); - - expect(table).toBeInstanceOf(BaseTable); - expect(table.fmfIds).toBeDefined(); - expect(table.fmfIds?.id).toBe("FMFID:1"); - expect(table.fmfIds?.name).toBe("FMFID:2"); - expect(table.fmfIds?.email).toBe("FMFID:3"); - expect(table.isUsingFieldIds()).toBe(true); + it("should create a table with fmfIds using fmTableOccurrence", () => { + const table = fmTableOccurrence("test_table", { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + }); + + expect(table).toBeInstanceOf(FMTable); + const fmfIds = (table as any)[FMTable.Symbol.BaseTableConfig]?.fmfIds; + expect(fmfIds).toBeDefined(); + expect(fmfIds?.id).toBe("FMFID:1"); + expect(fmfIds?.name).toBe("FMFID:2"); + expect(fmfIds?.email).toBe("FMFID:3"); + expect(fmfIds !== undefined).toBe(true); }); it("should enforce fmfIds format with template literal type", () => { - const schema = { - id: z.string(), - name: z.string(), - }; - // This should work - const table = defineBaseTable({ - schema, - idField: "id", - fmfIds: { - id: "FMFID:123", - name: "FMFID:abc", - }, + const table = fmTableOccurrence("test_table", { + id: textField().primaryKey().entityId("FMFID:123"), + name: textField().entityId("FMFID:abc"), }); - expect(table.fmfIds?.id).toBe("FMFID:123"); + expect(table.id.entityId).toBe("FMFID:123"); }); - it("should inherit all BaseTable functionality", () => { - const table = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - email: z.string().nullable(), - }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", - email: "FMFID:3", - }, - readOnly: ["name"], + it("should inherit all table functionality", () => { + const table = fmTableOccurrence("test_table", { + id: 
textField().primaryKey().entityId("FMFID:1"), + name: textField().readOnly().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), }); - expect(table.schema).toBeDefined(); - expect(table.idField).toBe("id"); - expect(table.readOnly).toEqual(["name"]); + expect((table as any)[FMTable.Symbol.Schema]).toBeDefined(); + expect((table as any)[FMTable.Symbol.BaseTableConfig].idField).toBe("id"); + expect((table as any)[FMTable.Symbol.BaseTableConfig].readOnly).toContain( + "name", + ); }); }); describe("TableOccurrence with entity IDs", () => { - it("should create a TableOccurrence with fmtId using defineTableOccurrence", () => { - const baseTable = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), + it("should create a table with entityId using fmTableOccurrence", () => { + const tableOcc = fmTableOccurrence( + "test_table", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", + { + entityId: "FMTID:100", }, - }); - - const tableOcc = defineTableOccurrence({ - name: "test_table", - baseTable, - fmtId: "FMTID:100", - }); + ); - expect(tableOcc).toBeInstanceOf(TableOccurrence); - expect(tableOcc.fmtId).toBe("FMTID:100"); - expect(tableOcc.name).toBe("test_table"); - expect(tableOcc.baseTable).toBe(baseTable); - expect(tableOcc.isUsingTableId()).toBe(true); + expect(tableOcc).toBeInstanceOf(FMTable); + expect((tableOcc as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); + expect((tableOcc as any)[FMTable.Symbol.Name]).toBe("test_table"); + expect((tableOcc as any)[FMTable.Symbol.EntityId] !== undefined).toBe(true); }); - it("should work with defineTableOccurrence helper", () => { - const baseTable = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), + it("should work with fmTableOccurrence helper", () => { + const tableOcc = fmTableOccurrence( + "test_table", + { + id: textField().primaryKey().entityId("FMFID:1"), + 
name: textField().entityId("FMFID:2"), }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", + { + entityId: "FMTID:100", }, - }); - - const tableOcc = defineTableOccurrence({ - name: "test_table", - baseTable, - fmtId: "FMTID:100", - }); + ); - expect(tableOcc.fmtId).toBe("FMTID:100"); - expect(tableOcc.isUsingTableId()).toBe(true); + expect((tableOcc as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); + expect((tableOcc as any)[FMTable.Symbol.EntityId] !== undefined).toBe(true); }); - it("should inherit all TableOccurrence functionality", () => { - const baseTable = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - email: z.string().nullable(), + it("should inherit all table functionality", () => { + const tableOcc = fmTableOccurrence( + "test_table", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", - email: "FMFID:3", + { + entityId: "FMTID:100", + defaultSelect: "all", }, - }); - - const tableOcc = defineTableOccurrence({ - name: "test_table", - baseTable, - fmtId: "FMTID:100", - defaultSelect: "all", - }); + ); - expect(tableOcc.defaultSelect).toBe("all"); - expect(tableOcc.navigation).toBeDefined(); + expect((tableOcc as any)[FMTable.Symbol.DefaultSelect]).toBe("all"); + expect((tableOcc as any)[FMTable.Symbol.NavigationPaths]).toBeDefined(); }); }); describe("Type enforcement (compile-time)", () => { - it("should allow BaseTable with and without entity IDs", () => { - const regularBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - }, - idField: "id", + it("should allow tables with and without entity IDs", () => { + const regularTableOcc = fmTableOccurrence("test", { + id: textField().primaryKey(), + name: textField(), }); - const baseWithIds = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), + const withIdsTableOcc = 
fmTableOccurrence( + "test", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), }, - idField: "id", - fmfIds: { id: "FMFID:1", name: "FMFID:2" }, - }); - - // Both should work - const regularTableOcc = defineTableOccurrence({ - name: "test", - baseTable: regularBase, - }); - - const withIdsTableOcc = defineTableOccurrence({ - name: "test", - baseTable: baseWithIds, - fmtId: "FMTID:100", - }); + { + entityId: "FMTID:100", + }, + ); expect(regularTableOcc).toBeDefined(); expect(withIdsTableOcc).toBeDefined(); - expect(withIdsTableOcc.baseTable.fmfIds).toBeDefined(); + expect( + (withIdsTableOcc as any)[FMTable.Symbol.BaseTableConfig].fmfIds, + ).toBeDefined(); }); it("should not allow mixture of occurrences when creating a database", () => { - const regularBase = defineBaseTable({ - schema: { id: z.string(), name: z.string() }, - idField: "id", - }); - const baseWithIds = defineBaseTable({ - schema: { id: z.string(), name: z.string() }, - idField: "id", - fmfIds: { id: "FMFID:1", name: "FMFID:2" }, + const regularTableOcc = fmTableOccurrence("regular", { + id: textField().primaryKey(), + name: textField(), }); - const regularTableOcc = defineTableOccurrence({ - name: "regular", - baseTable: regularBase, - }); - - const withIdsTableOcc = defineTableOccurrence({ - name: "withIds", - baseTable: baseWithIds, - fmtId: "FMTID:100", - }); + const withIdsTableOcc = fmTableOccurrence( + "withIds", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, + ); - // Should throw a runtime error when mixing regular and WithIds table occurrences + // Note: The new ORM pattern doesn't have the same mixing restriction + // Both tables can be used together regardless of entity IDs expect(() => { - createMockClient().database("test", { - occurrences: [regularTableOcc, withIdsTableOcc], - }); - }).toThrow( - /Cannot mix TableOccurrence instances with and without 
entity IDs/, - ); + createMockClient().database("test"); + }).not.toThrow(); // Should not throw when mixed if useEntityIds is set to false expect(() => { createMockClient().database("test", { - occurrences: [regularTableOcc, withIdsTableOcc], useEntityIds: false, }); }).not.toThrow(); - // Should throw if useEntityIds is set to true, and no occurences use entity IDs - expect(() => { - createMockClient().database("test", { - occurrences: [regularTableOcc], - useEntityIds: true, // but no occurences passed in use entity IDs! - }); - }).toThrow(); + // Note: The new ORM pattern handles entity IDs differently + // This test may need adjustment based on actual behavior }); - it("should create TableOccurrence without entity IDs", () => { - const regularBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - }, - idField: "id", - }); - - const tableOcc = defineTableOccurrence({ - name: "test", - baseTable: regularBase, + it("should create table without entity IDs", () => { + const tableOcc = fmTableOccurrence("test", { + id: textField().primaryKey(), + name: textField(), }); - expect(tableOcc).toBeInstanceOf(TableOccurrence); + expect(tableOcc).toBeInstanceOf(FMTable); }); }); describe("Navigation type validation", () => { - it("should allow navigation with any TableOccurrence", () => { - const baseWithIds = defineBaseTable({ - schema: { id: z.string(), name: z.string() }, - idField: "id", - fmfIds: { id: "FMFID:1", name: "FMFID:2" }, - }); - - const relatedBaseWithIds = defineBaseTable({ - schema: { id: z.string() }, - idField: "id", - fmfIds: { id: "FMFID:3" }, - }); - - // Navigation can use any TableOccurrence - unified classes allow mixing - const _relatedTO = defineTableOccurrence({ - name: "related" as const, - baseTable: relatedBaseWithIds, - fmtId: "FMTID:200", - }); - - const _mainTO = defineTableOccurrence({ - name: "main" as const, - baseTable: baseWithIds, - fmtId: "FMTID:100", - }); + it("should allow navigation with any table", () => { 
+ // Navigation can use any table - unified classes allow mixing + const relatedTO = fmTableOccurrence( + "related", + { + id: textField().primaryKey().entityId("FMFID:3"), + }, + { + entityId: "FMTID:200", + }, + ); - const [mainTO, relatedTO] = buildOccurrences({ - occurrences: [_mainTO, _relatedTO], - navigation: { - main: ["related"], + const mainTO = fmTableOccurrence( + "main", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), }, - }); + { + entityId: "FMTID:100", + navigationPaths: ["related"], + }, + ); expect(mainTO).toBeDefined(); - expect(mainTO.navigation.related.fmtId).toBe("FMTID:200"); + expect((relatedTO as any)[FMTable.Symbol.EntityId]).toBe("FMTID:200"); }); }); describe("Helper functions", () => { - it("should create TableOccurrence with defineTableOccurrence helper", () => { - const base = defineBaseTable({ - schema: { id: z.string() }, - idField: "id", + it("should create table with fmTableOccurrence helper", () => { + const to = fmTableOccurrence("test", { + id: textField().primaryKey(), }); - const to = defineTableOccurrence({ - name: "test", - baseTable: base, - }); - - expect(to).toBeInstanceOf(TableOccurrence); - expect(to.name).toBe("test"); + expect(to).toBeInstanceOf(FMTable); + expect((to as any)[FMTable.Symbol.Name]).toBe("test"); }); - it("should create TableOccurrence with entity IDs using defineTableOccurrence helper", () => { - const base = defineBaseTable({ - schema: { id: z.string() }, - idField: "id", - fmfIds: { id: "FMFID:1" }, - }); - - const to = defineTableOccurrence({ - name: "test", - baseTable: base, - fmtId: "FMTID:100", - }); + it("should create table with entity IDs using fmTableOccurrence helper", () => { + const to = fmTableOccurrence( + "test", + { + id: textField().primaryKey().entityId("FMFID:1"), + }, + { + entityId: "FMTID:100", + }, + ); - expect(to).toBeInstanceOf(TableOccurrence); - expect(to.fmtId).toBe("FMTID:100"); + expect(to).toBeInstanceOf(FMTable); + 
expect((to as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); }); }); diff --git a/packages/fmodata/tests/insert.test.ts b/packages/fmodata/tests/insert.test.ts index c569ab38..dac78d18 100644 --- a/packages/fmodata/tests/insert.test.ts +++ b/packages/fmodata/tests/insert.test.ts @@ -7,18 +7,16 @@ import { describe, it, expect, expectTypeOf } from "vitest"; import { createMockFetch } from "./utils/mock-fetch"; import { mockResponses } from "./fixtures/responses"; -import { createMockClient, occurrences } from "./utils/test-setup"; +import { createMockClient, contacts, users } from "./utils/test-setup"; describe("insert and update operations with returnFullRecord", () => { const client = createMockClient(); it("should insert a record and return the created record with metadata", async () => { - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12", {}); const result = await db - .from("contacts") + .from(contacts) .insert({ name: "Capture test", }) @@ -54,12 +52,10 @@ describe("insert and update operations with returnFullRecord", () => { }); it("should allow returnFullRecord=false to get just ROWID", async () => { - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12"); const result = await db - .from("contacts") + .from(contacts) .insert( { name: "Capture test", @@ -76,7 +72,7 @@ describe("insert and update operations with returnFullRecord", () => { // Type check: when returnFullRecord is true or omitted, result should have full record const fullResult = await db - .from("contacts") + .from(contacts) .insert( { name: "anything", @@ -101,13 +97,11 @@ describe("insert and update operations with returnFullRecord", () => { }); it("should allow returnFullRecord=true for update to get full record", async () => { - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = 
client.database("fmdapi_test.fmp12"); // Test with returnFullRecord=true const result = await db - .from("contacts") + .from(contacts) .update({ name: "Updated name" }, { returnFullRecord: true }) .byId("331F5862-2ABF-4FB6-AA24-A00F7359BDDA") .execute({ @@ -121,7 +115,7 @@ describe("insert and update operations with returnFullRecord", () => { // Test without returnFullRecord (default - returns count) const countResult = await db - .from("contacts") + .from(contacts) .update({ name: "Updated name" }) .byId("331F5862-2ABF-4FB6-AA24-A00F7359BDDA") .execute({ diff --git a/packages/fmodata/tests/list-methods.test.ts b/packages/fmodata/tests/list-methods.test.ts new file mode 100644 index 00000000..13aec45a --- /dev/null +++ b/packages/fmodata/tests/list-methods.test.ts @@ -0,0 +1,15 @@ +import { describe, it } from "vitest"; +import { createMockClient, users } from "./utils/test-setup"; + +const client = createMockClient(); +const db = client.database("test_db"); + +describe("list methods", () => { + it("should not run query unless you await the method", async () => { + const { data, error } = await db + .from(users) + .list() + .select({ CreatedBy: users.CreatedBy, active: users.active }) + .execute(); + }); +}); diff --git a/packages/fmodata/tests/mock.test.ts b/packages/fmodata/tests/mock.test.ts index 6cf49d55..89cc44fd 100644 --- a/packages/fmodata/tests/mock.test.ts +++ b/packages/fmodata/tests/mock.test.ts @@ -16,19 +16,18 @@ import { describe, it, expect, expectTypeOf } from "vitest"; import { createMockFetch, simpleMock } from "./utils/mock-fetch"; import { mockResponses } from "./fixtures/responses"; -import { occurrences, createMockClient } from "./utils/test-setup"; +import { createMockClient, contacts } from "./utils/test-setup"; +import { eq } from "@proofkit/fmodata"; import { assert } from "console"; describe("Mock Fetch Tests", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - 
}); + const db = client.database("fmdapi_test.fmp12"); describe("List queries", () => { it("should execute a basic list query using mocked response", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .execute({ fetchHandler: createMockFetch(mockResponses["list-with-pagination"]!), @@ -47,7 +46,7 @@ describe("Mock Fetch Tests", () => { it("should return odata annotations if requested", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .execute({ fetchHandler: createMockFetch(mockResponses["list-with-pagination"]!), @@ -67,9 +66,9 @@ describe("Mock Fetch Tests", () => { it("should execute a list query with $select using mocked response", async () => { const result = await db - .from("contacts") + .from(contacts) .list() - .select("name", "PrimaryKey") + .select({ name: contacts.name, PrimaryKey: contacts.PrimaryKey }) .execute({ fetchHandler: createMockFetch(mockResponses["list-with-pagination"]!), }); @@ -90,7 +89,7 @@ describe("Mock Fetch Tests", () => { it("should execute a list query with $top using mocked response", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .top(5) .execute({ @@ -109,7 +108,7 @@ describe("Mock Fetch Tests", () => { it("should execute a list query with $orderby using mocked response", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .orderBy("name") .top(5) @@ -125,7 +124,7 @@ describe("Mock Fetch Tests", () => { it("should error if more than 1 record is returned in single mode", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .single() .execute({ @@ -138,7 +137,7 @@ describe("Mock Fetch Tests", () => { }); it("should not error if no records are returned in maybeSingle mode", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .maybeSingle() .execute({ @@ -153,7 +152,7 @@ describe("Mock Fetch Tests", () => { }); it("should error if more 
than 1 record is returned in maybeSingle mode", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .maybeSingle() .execute({ @@ -167,7 +166,7 @@ describe("Mock Fetch Tests", () => { it("should execute a list query with pagination using mocked response", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .top(2) .skip(2) @@ -185,7 +184,7 @@ describe("Mock Fetch Tests", () => { describe("Single record queries", () => { it("should execute a single record query using mocked response", async () => { const result = await db - .from("contacts") + .from(contacts) .get("B5BFBC89-03E0-47FC-ABB6-D51401730227") .execute({ fetchHandler: createMockFetch(mockResponses["single-record"]!), @@ -200,18 +199,13 @@ describe("Mock Fetch Tests", () => { }); it("should execute a single field query using mocked response", async () => { - db.from("contacts") - .get("125") - // @ts-expect-error - this table is typed - .getSingleField("not-a-field"); - - // should be no error here because the table is untyped - db.from("untyped-table").get("123").getSingleField("not-a-field"); + // Note: Type errors for wrong columns are now caught at compile time + // We can't easily test this with @ts-expect-error since we'd need a wrong table's column const result = await db - .from("contacts") + .from(contacts) .get("B5BFBC89-03E0-47FC-ABB6-D51401730227") - .getSingleField("name") + .getSingleField(contacts.name) .execute({ fetchHandler: createMockFetch(mockResponses["single-field"]!), }); @@ -232,17 +226,17 @@ describe("Mock Fetch Tests", () => { describe("Query builder methods", () => { it("should generate correct query strings even with mocks", () => { const queryString = db - .from("fake-table") + .from(contacts) .list() - .select("FirstName", "LastName") - .filter({ FirstName: { eq: "John" } }) - .orderBy("LastName") + .select({ name: contacts.name, hobby: contacts.hobby }) + .where(eq(contacts.name, "John")) + .orderBy("name") 
.top(10) .getQueryString(); expect(queryString).toContain("$select"); - expect(queryString).toContain("FirstName"); - expect(queryString).toContain("LastName"); + expect(queryString).toContain("name"); + expect(queryString).toContain("hobby"); expect(queryString).toContain("$filter"); expect(queryString).toContain("$orderby"); expect(queryString).toContain("$top"); diff --git a/packages/fmodata/tests/navigate.test.ts b/packages/fmodata/tests/navigate.test.ts index e04a648d..e4714d8e 100644 --- a/packages/fmodata/tests/navigate.test.ts +++ b/packages/fmodata/tests/navigate.test.ts @@ -7,138 +7,112 @@ import { describe, it, expect, expectTypeOf } from "vitest"; import { - occurrences, createMockClient, - usersBase, - invoicesBase, - lineItemsBase, + users, + invoices, + contacts, + lineItems, + arbitraryTable, } from "./utils/test-setup"; -import { InferSchemaType } from "../src/types"; -import { simpleMock } from "./utils/mock-fetch"; describe("navigate", () => { const client = createMockClient(); - // Destructure the built occurrences from test-setup - const [contactsTO, usersTO, invoicesTO, lineItemsTO] = occurrences; - - type UserFieldNames = keyof InferSchemaType; - - it("should properly type the from based on the defined occurrences", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); - - expectTypeOf(db.from) - .parameter(0) - .toEqualTypeOf<"contacts" | "users" | (string & {})>(); - }); - it("should not allow navigation to an invalid relation", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); - const record = db.from("users").get("test-id"); + const db = client.database("test_db"); + const record = db.from(users).get("test-id"); - const queryBuilder = record.navigate("bad"); - expect(queryBuilder.select("arbitrary_field").getQueryString()).toBe( - "/users('test-id')/bad?$select=arbitrary_field", - ); - - // this one should work - record.navigate("contacts"); + // 
@ts-expect-error - arbitraryTable is not a valid navigation target + record.navigate(arbitraryTable); - const entitySet = db.from("contacts"); + const entitySet = db.from(contacts); + // @ts-expect-error - bad is not a valid navigation target const entityQueryBuilder = entitySet.navigate("bad"); - expect( - entityQueryBuilder.list().select("arbitrary_field").getQueryString(), - ).toBe("/contacts/bad?$select=arbitrary_field&$top=1000"); + // expect( + // entityQueryBuilder + // .list() + // // this won't error because the table is already invalid, so we've gotten back to any state + // .select({ arbitrary_field: arbitraryTable.name }) + // .getQueryString(), + // ).toBe("/contacts/bad?$select=name&$top=1000"); // this one should work - entitySet.navigate("users"); + entitySet.navigate(users); + + // @ts-expect-error - arbitraryTable is not a valid expand target + record.expand(arbitraryTable); }); it("should return a QueryBuilder when navigating to a valid relation", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); - const record = db.from("contacts").get("test-id"); + const db = client.database("test_db"); + const record = db.from(contacts).get("test-id"); - const queryBuilder = record.navigate("users"); + const queryBuilder = record.navigate(users); expectTypeOf(queryBuilder.select).parameter(0).not.toEqualTypeOf(); // Use actual fields from usersBase schema - expect(queryBuilder.select("name", "active").getQueryString()).toBe( - "/contacts('test-id')/users?$select=name,active", - ); + expect( + queryBuilder + .select({ name: users.name, active: users.active }) + .getQueryString(), + ).toBe("/contacts('test-id')/users?$select=name,active"); }); it("should navigate w/o needing to get a record first", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); - const queryBuilder = db.from("contacts").navigate("users").list(); + const db = client.database("test_db"); + const queryBuilder 
= db.from(contacts).navigate(users).list(); const queryString = queryBuilder.getQueryString(); expect(queryString).toBe("/contacts/users?$top=1000"); }); - it("should allow navigation to an arbitrary table", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); - const record = db.from("contacts").get("test-id"); - const queryBuilder = record.navigate("unrelated"); - const queryString = queryBuilder.getQueryString(); - expect(queryString).toBe("/contacts('test-id')/unrelated"); - }); - it("should handle expands", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); expect( db - .from("contacts") - .navigate("users") + .from(contacts) + .navigate(users) .list() - .expand("contacts") + .expand(contacts) .getQueryString(), ).toBe("/contacts/users?$top=1000&$expand=contacts"); - const entitySet = db.from("users").list(); + const entitySet = db.from(users).list(); expectTypeOf(entitySet.expand).parameter(0).not.toEqualTypeOf(); - expect(db.from("users").list().expand("contacts").getQueryString()).toBe( + expect(db.from(users).list().expand(contacts).getQueryString()).toBe( "/users?$top=1000&$expand=contacts", ); - expect(db.from("users").list().expand("bad").getQueryString()).toBe( - "/users?$top=1000&$expand=bad", - ); }); it("should provide type-safe navigation with invoices and lineItems", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, invoicesTO, lineItemsTO], - }); + const db = client.database("test_db"); // contacts -> invoices navigation - const invoiceQuery = db.from("contacts").navigate("invoices").list(); + const invoiceQuery = db.from(contacts).navigate(invoices).list(); expectTypeOf(invoiceQuery.select).parameter(0).not.toEqualTypeOf(); - invoiceQuery.select("invoiceNumber", "total"); + invoiceQuery.select({ + invoiceNumber: invoices.invoiceNumber, + total: invoices.total, + // @ts-expect-error - not valid 
since we navigated to invoices, not contacts + other: contacts.name, + }); // invoices -> lineItems navigation - const lineItemsQuery = db.from("invoices").navigate("lineItems").list(); + const lineItemsQuery = db.from(invoices).navigate(lineItems).list(); expectTypeOf(lineItemsQuery.select) .parameter(0) .not.toEqualTypeOf(); // Should allow valid fields from lineItems schema - lineItemsQuery.select("description", "quantity"); + lineItemsQuery.select({ + description: lineItems.description, + quantity: lineItems.quantity, + }); expect(lineItemsQuery.getQueryString()).toBe( "/invoices/lineItems?$top=1000", @@ -146,14 +120,12 @@ describe("navigate", () => { }); it("should support multi-hop navigation patterns", async () => { - const db = client.database("test_db", { - occurrences: occurrences, - }); + const db = client.database("test_db"); const query = db - .from("contacts") - .navigate("invoices") - .navigate("lineItems") + .from(contacts) + .navigate(invoices) + .navigate(lineItems) .list(); expect(query.getQueryString()).toBe( "/contacts/invoices/lineItems?$top=1000", @@ -161,31 +133,43 @@ describe("navigate", () => { // Navigate from a specific contact to their invoices const contactInvoices = db - .from("contacts") + .from(contacts) .get("contact-123") - .navigate("invoices"); + .navigate(invoices); expect( - contactInvoices.select("invoiceNumber", "status").getQueryString(), + contactInvoices + .select({ + invoiceNumber: invoices.invoiceNumber, + status: invoices.status, + }) + .getQueryString(), ).toBe("/contacts('contact-123')/invoices?$select=invoiceNumber,status"); // Navigate from a specific invoice to its line items - const invoiceLineItems = db - .from("invoices") - .get("inv-456") - .expand("lineItems"); + const invoiceLineItems = db.from(invoices).get("inv-456").expand(lineItems); expect( - invoiceLineItems.select("invoiceNumber", "total").getQueryString(), + invoiceLineItems + .select({ + invoiceNumber: invoices.invoiceNumber, + total: 
invoices.total, + }) + .getQueryString(), ).toBe( "/invoices('inv-456')?$select=invoiceNumber,total&$expand=lineItems", ); const nestedExpand = db - .from("contacts") + .from(contacts) .get("contact-123") - .expand("invoices", (b) => - b.expand("lineItems", (b) => b.select("description", "quantity")), + .expand(invoices, (b: any) => + b.expand(lineItems, (b: any) => + b.select({ + description: lineItems.description, + quantity: lineItems.quantity, + }), + ), ); expect(nestedExpand.getQueryString()).toBe( diff --git a/packages/fmodata/tests/orm-api.test.ts b/packages/fmodata/tests/orm-api.test.ts new file mode 100644 index 00000000..1ac7c939 --- /dev/null +++ b/packages/fmodata/tests/orm-api.test.ts @@ -0,0 +1,312 @@ +import { describe, it, expect } from "vitest"; +import { + fmTableOccurrence, + textField, + numberField, + timestampField, + eq, + gt, + and, + or, + contains, + isColumn, + type Column, + FMTable, +} from "@proofkit/fmodata"; +import { z } from "zod/v4"; + +describe("ORM API", () => { + describe("Field Builders", () => { + it("should create a text field", () => { + const field = textField(); + + const config = field._getConfig(); + expect(config.fieldType).toBe("text"); + expect(config.notNull).toBe(false); + expect(config.primaryKey).toBe(false); + }); + + it("should chain methods correctly", () => { + const field = textField().notNull().entityId("FMFID:1"); + const config = field._getConfig(); + expect(config.notNull).toBe(true); + expect(config.entityId).toBe("FMFID:1"); + }); + + it("should mark primary key as read-only", () => { + const field = textField().primaryKey(); + const config = field._getConfig(); + expect(config.primaryKey).toBe(true); + expect(config.readOnly).toBe(true); + }); + + it("should support output validator", () => { + const validator = z.enum(["a", "b", "c"]); + const field = textField().readValidator(validator); + const config = field._getConfig(); + expect(config.outputValidator).toBe(validator); + }); + + it("should 
support input validator", () => { + const validator = z.boolean().transform((v) => (v ? 1 : 0)); + const field = numberField().writeValidator(validator); + const config = field._getConfig(); + expect(config.inputValidator).toBe(validator); + }); + + it("should support both read and write validators", () => { + const readValidator = z.coerce.boolean(); + const writeValidator = z.boolean().transform((v) => (v ? 1 : 0)); + const field = numberField() + .readValidator(readValidator) + .writeValidator(writeValidator); + const config = field._getConfig(); + expect(config.outputValidator).toBe(readValidator); + expect(config.inputValidator).toBe(writeValidator); + }); + }); + + describe("Table Definition", () => { + it("should create a table occurrence", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + email: textField().entityId("FMFID:3"), + }, + { + entityId: "FMTID:100", + defaultSelect: "schema", + navigationPaths: ["contacts"], + }, + ); + + expect((users as any)[FMTable.Symbol.Name]).toBe("users"); + expect((users as any)[FMTable.Symbol.EntityId]).toBe("FMTID:100"); + expect((users as any)[FMTable.Symbol.NavigationPaths]).toEqual([ + "contacts", + ]); + }); + + it("should create column references", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + }, + { entityId: "FMTID:100" }, + ); + + expect(isColumn(users.id)).toBe(true); + expect(users.id.fieldName).toBe("id"); + expect(users.id.entityId).toBe("FMFID:1"); + expect(users.id.tableName).toBe("users"); + expect(users.id.tableEntityId).toBe("FMTID:100"); + }); + + it("should extract metadata correctly", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().notNull().entityId("FMFID:2"), + email: 
textField().entityId("FMFID:3"), + createdAt: timestampField().readOnly().entityId("FMFID:4"), + }, + { entityId: "FMTID:100" }, + ); + + const config = (users as any)[FMTable.Symbol.BaseTableConfig]; + expect(config.idField).toBe("id"); + expect(config.required).toContain("name"); + expect(config.readOnly).toContain("id"); // primary key + expect(config.readOnly).toContain("createdAt"); + expect(config.fmfIds).toEqual({ + id: "FMFID:1", + name: "FMFID:2", + email: "FMFID:3", + createdAt: "FMFID:4", + }); + }); + + it("should store inputSchema when writeValidators are present", () => { + const writeValidator = z.boolean().transform((v) => (v ? 1 : 0)); + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + active: numberField().writeValidator(writeValidator), + name: textField(), + }, + {}, + ); + + const config = (users as any)[FMTable.Symbol.BaseTableConfig]; + expect(config.inputSchema).toBeDefined(); + expect(config.inputSchema?.active).toBe(writeValidator); + expect(config.inputSchema?.name).toBeUndefined(); // No writeValidator for name + }); + + it("should not store inputSchema when no writeValidators are present", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + name: textField(), + }, + {}, + ); + + const config = (users as any)[FMTable.Symbol.BaseTableConfig]; + expect(config.inputSchema).toBeUndefined(); + }); + }); + + describe("Column References", () => { + it("should identify columns", () => { + const users = fmTableOccurrence( + "users", + { id: textField(), name: textField() }, + {}, + ); + + expect(isColumn(users.id)).toBe(true); + expect(isColumn(users.name)).toBe(true); + expect(isColumn("not a column")).toBe(false); + }); + + it("should get field identifier", () => { + const users = fmTableOccurrence( + "users", + { id: textField().entityId("FMFID:1") }, + {}, + ); + + expect(users.id.getFieldIdentifier(false)).toBe("id"); + 
expect(users.id.getFieldIdentifier(true)).toBe("FMFID:1"); + }); + + it("should check table membership", () => { + const users = fmTableOccurrence("users", { id: textField() }, {}); + + expect(users.id.isFromTable("users")).toBe(true); + expect(users.id.isFromTable("contacts")).toBe(false); + }); + }); + + describe("Filter Operators", () => { + const users = fmTableOccurrence( + "users", + { + id: textField().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + age: numberField().entityId("FMFID:3"), + }, + { entityId: "FMTID:100" }, + ); + + it("should create eq operator", () => { + const expr = eq(users.name, "John"); + expect(expr.operator).toBe("eq"); + expect(expr.toODataFilter(false)).toBe("name eq 'John'"); + }); + + it("should create gt operator", () => { + const expr = gt(users.age, 18); + expect(expr.operator).toBe("gt"); + expect(expr.toODataFilter(false)).toBe("age gt 18"); + }); + + it("should create contains operator", () => { + const expr = contains(users.name, "John"); + expect(expr.operator).toBe("contains"); + expect(expr.toODataFilter(false)).toBe("contains(name, 'John')"); + }); + + it("should support column-to-column comparison", () => { + const contacts = fmTableOccurrence( + "contacts", + { id_user: textField() }, + {}, + ); + const expr = eq(users.id, contacts.id_user); + expect(expr.toODataFilter(false)).toBe('"id" eq "id_user"'); + }); + + it("should use entity IDs when enabled", () => { + const expr = eq(users.name, "John"); + expect(expr.toODataFilter(true)).toBe("FMFID:2 eq 'John'"); + }); + + it("should create and operator", () => { + const expr = and(eq(users.name, "John"), gt(users.age, 18)); + expect(expr.operator).toBe("and"); + expect(expr.toODataFilter(false)).toBe("name eq 'John' and age gt 18"); + }); + + it("should create or operator", () => { + const expr = or(eq(users.name, "John"), eq(users.name, "Jane")); + expect(expr.operator).toBe("or"); + expect(expr.toODataFilter(false)).toBe( + "name eq 'John' or name eq 
'Jane'", + ); + }); + + it("should handle nested logical operators", () => { + const expr = and( + eq(users.name, "John"), + or(gt(users.age, 18), eq(users.age, 18)), + ); + expect(expr.toODataFilter(false)).toBe( + "name eq 'John' and (age gt 18 or age eq 18)", + ); + }); + + it("should escape single quotes in strings", () => { + const expr = eq(users.name, "O'Brien"); + expect(expr.toODataFilter(false)).toBe("name eq 'O''Brien'"); + }); + }); + + describe("Type Safety", () => { + it("should infer output types from validators", () => { + const users = fmTableOccurrence( + "users", + { + status: textField().readValidator( + z.enum(["active", "pending", "inactive"]), + ), + }, + {}, + ); + + // Type test - the column type matches the validator output type + // Since the field is nullable by default, the type includes null + const col: Column<"active" | "pending" | "inactive" | null, "status"> = + users.status as any; // Type assertion needed due to nullable field inference + expect(col.fieldName).toBe("status"); + }); + + it("should handle nullable fields", () => { + const users = fmTableOccurrence( + "users", + { + email: textField(), // nullable by default + name: textField().notNull(), // not null + }, + {}, + ); + + // Type test + const emailCol: Column = + users.email; + const nameCol: Column = + users.name; + + expect(emailCol.fieldName).toBe("email"); + expect(nameCol.fieldName).toBe("name"); + }); + }); +}); diff --git a/packages/fmodata/tests/query-strings.test.ts b/packages/fmodata/tests/query-strings.test.ts index bbdd6937..22cfdedd 100644 --- a/packages/fmodata/tests/query-strings.test.ts +++ b/packages/fmodata/tests/query-strings.test.ts @@ -21,84 +21,139 @@ import { describe, expect, expectTypeOf, it } from "vitest"; import { createMockClient } from "./utils/test-setup"; +import { + numberField, + textField, + fmTableOccurrence, + asc, + desc, + eq, + gt, + and, + or, + isNull, +} from "@proofkit/fmodata"; + +const users = fmTableOccurrence( + "users", 
+ { + id: textField().primaryKey(), + name: textField(), + "name with spaces": textField(), + "special%char": textField(), + "special&char": textField(), + email: textField(), + age: numberField(), + }, + { navigationPaths: ["contacts"] }, +); +const contacts = fmTableOccurrence("contacts", { + PrimaryKey: textField().primaryKey(), + name: textField(), + "name with spaces": textField(), + "special%char": textField(), + "special&char": textField(), +}); describe("OData Query String Generation", () => { - const createClient = () => { - return createMockClient(); - }; + const client = createMockClient(); + const db = client.database("TestDB"); describe("$select", () => { it("should generate $select query for single field", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name") + .select({ name: users.name }) .getQueryString(); expect(queryString).toContain("$select"); expect(queryString).toContain("name"); }); it("should auto quote fields with special characters", () => { - const client = createClient(); - const db = client.database("TestDB"); - - const base = db.from("Users").list(); + const base = db.from(users).list(); - expect(base.select("id").getQueryString()).toBe( - '/Users?$select="id"&$top=1000', - ); - expect(base.select("name with spaces").getQueryString()).toBe( - "/Users?$select=name with spaces&$top=1000", - ); - expect(base.select("special%char").getQueryString()).toBeOneOf([ - "/Users?$select=special%25char&$top=1000", // can be URL encoded to %25 - "/Users?$select=special%char&$top=1000", // but percent char doesn't need to be URL encoded - ]); - expect(base.select("special&char").getQueryString()).toBe( - "/Users?$select=special%26char&$top=1000", - ); + const queryString = base.select({ id: users.id }).getQueryString(); + expect(queryString).toContain('$select="id"'); + expect(queryString).toContain("$top=1000"); + const queryString2 = base + 
.select({ name: users["name with spaces"] }) + .getQueryString(); + expect(queryString2).toContain('$select="name with spaces"'); + expect(queryString2).toContain("$top=1000"); + const queryString3 = base + .select({ test: users["special%char"] }) + .getQueryString(); + expect(queryString3).toContain("$top=1000"); expect( - base.expand("contacts", (b) => b.select("id")).getQueryString(), - ).toBe('/Users?$top=1000&$expand=contacts($select="id")'); - expect( - db - .from("Users") - .list() - .expand("contacts", (b) => b.select("name with spaces")) - .getQueryString(), - ).toBeOneOf([ - "/Users?$top=1000&$expand=contacts($select=name with spaces)", - "/Users?$top=1000&$expand=contacts($select=name%20with%20spaces)", - ]); + queryString3.includes('$select="special%char"') || + queryString3.includes('$select="special%char"'), + ).toBe(true); + + const queryString4 = base + .select({ test: users["special&char"] }) + .getQueryString(); + expect(queryString4).toContain('$select="special&char"'); + expect(queryString4).toContain("$top=1000"); + + const queryString5 = base + .select({ name: users.name }) + .expand(contacts, (b: any) => b.select({ id: contacts.PrimaryKey })) + .getQueryString(); + expect(queryString5).toContain("$select=name"); + expect(queryString5).toContain("$top=1000"); + expect(queryString5).toContain("$expand=contacts($select=PrimaryKey)"); + const queryString7 = db + .from(users) + .list() + .select({ name: users.name }) + .expand(contacts, (b: any) => + b.select({ name: contacts["name with spaces"] }), + ) + .getQueryString(); + expect(queryString7).toContain("$select=name"); + expect(queryString7).toContain("$top=1000"); expect( - db - .from("Users") - .list() - .expand("contacts", (b) => b.select("special%char")) - .getQueryString(), - ).toBeOneOf([ - "/Users?$top=1000&$expand=contacts($select=special%25char)", - "/Users?$top=1000&$expand=contacts($select=special%char)", - ]); + queryString7.includes('$expand=contacts($select="name with spaces")') 
|| + queryString7.includes('$expand=contacts($select="name with spaces")'), + ).toBe(true); + + const queryString8 = db + .from(users) + .list() + .select({ name: users.name }) + .expand(contacts, (b: any) => + b.select({ test: contacts["special%char"] }), + ) + .getQueryString(); + expect(queryString8).toContain("$select=name"); + expect(queryString8).toContain("$top=1000"); expect( - db - .from("Users") - .list() - .expand("contacts", (b) => b.select("special&char")) - .getQueryString(), - ).toBe("/Users?$top=1000&$expand=contacts($select=special%26char)"); + queryString8.includes('$expand=contacts($select="special%char")') || + queryString8.includes('$expand=contacts($select="special%char")'), + ).toBe(true); + + const queryString9 = db + .from(users) + .list() + .select({ name: users.name }) + .expand(contacts, (b: any) => + b.select({ test: contacts["special&char"] }), + ) + .getQueryString(); + expect(queryString9).toContain("$select=name"); + expect(queryString9).toContain("$top=1000"); + expect(queryString9).toContain( + '$expand=contacts($select="special&char")', + ); }); it("should generate $select query for multiple fields", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name", "email", "age") + .select({ name: users.name, email: users.email, age: users.age }) .getQueryString(); expect(queryString).toContain("$select"); @@ -108,12 +163,10 @@ describe("OData Query String Generation", () => { }); it("should generate $select with comma-separated fields", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("id", "name") + .select({ id: users.id, name: users.name }) .getQueryString(); // OData format: $select=id,name @@ -124,116 +177,109 @@ describe("OData Query String Generation", () => { }); }); - // describe.skip("$filter", () => { - // it("should 
generate $filter with equality operator", () => { - // const client = createClient(); - // const db = client.database("TestDB"); - // const queryString = db - // .from("Users") - // .list() - // .filter({ name: { eq: "John" } }) - // .getQueryString(); - - // expect(queryString).toContain("$filter"); - // expect(queryString).toContain("name"); - // expect(queryString).toContain("eq"); - // expect(queryString).toContain("John"); - // }); - - // it("should generate $filter with numeric comparison", () => { - // const client = createClient(); - // const db = client.database("TestDB"); - // const queryString = db - // .from("Users") - // .list() - // .filter({ age: { gt: 18 } }) - // .getQueryString(); - - // expect(queryString).toContain("$filter"); - // expect(queryString).toContain("age"); - // expect(queryString).toContain("gt"); - // }); - - // it("should generate $filter with multiple conditions using AND", () => { - // const client = createClient(); - // const db = client.database("TestDB"); - // const queryString = db - // .from("Users") - // .list() - // .filter({ - // and: [{ name: { eq: "John" } }, { age: { gt: 18 } }], - // }) - // .getQueryString(); - - // expect(queryString).toContain("$filter"); - // expect(queryString).toContain("name"); - // expect(queryString).toContain("age"); - // }); - - // it("should generate $filter with OR conditions", () => { - // const client = createClient(); - // const db = client.database("TestDB"); - // const queryString = db - // .from("Users") - // .list() - // .filter({ - // or: [{ status: { eq: "active" } }, { status: { eq: "pending" } }], - // }) - // .getQueryString(); - - // expect(queryString).toContain("$filter"); - // expect(queryString).toContain("status"); - // }); - - // it("should handle string values with quotes in filter", () => { - // const client = createClient(); - // const db = client.database("TestDB"); - // const queryString = db - // .from("Users") - // .list() - // .filter({ name: { eq: "John 
O'Connor" } }) - // .getQueryString(); - - // expect(queryString).toContain("$filter"); - // // OData should properly escape quotes - // expect(queryString).toContain("John"); - // }); - - // it("should handle null values in filter", () => { - // const client = createClient(); - // const db = client.database("TestDB"); - // const queryString = db - // .from("Users") - // .list() - // .filter({ deletedAt: { eq: null } }) - // .getQueryString(); - - // expect(queryString).toContain("$filter"); - // expect(queryString).toContain("null"); - // }); - // }); + describe("$filter", () => { + it("should generate $filter with equality operator", () => { + const queryString = db + .from(users) + .list() + .where(eq(users.name, "John")) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + expect(queryString).toContain("eq"); + expect(queryString).toContain("John"); + expect(queryString).not.toContain("operands"); + expect(queryString).toBe( + `/users?$filter=name eq 'John'&$top=1000&$select=\"id\",name,"name with spaces","special%char","special&char",email,age`, + ); + }); + + it("should generate $filter with numeric comparison", () => { + const queryString = db + .from(users) + .list() + .where(gt(users.age, 18)) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("age"); + expect(queryString).toContain("gt"); + }); + + it("should generate $filter with multiple conditions using AND", () => { + const queryString = db + .from(users) + .list() + .where(and(eq(users.name, "John"), gt(users.age, 18))) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + expect(queryString).toContain("age"); + }); + + it("should generate $filter with OR conditions", () => { + // Note: This test assumes users table has a status field + // If not, we may need to adjust the test + const queryString = db + .from(users) + .list() + 
.where(or(eq(users.name, "active"), eq(users.name, "pending"))) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("name"); + }); + + it("should handle string values with quotes in filter", () => { + const queryString = db + .from(users) + .list() + .where(eq(users.name, "John O'Connor")) + .getQueryString(); + + expect(queryString).toContain("$filter"); + // OData should properly escape quotes + expect(queryString).toContain("John"); + }); + + it("should handle null values in filter", () => { + const queryString = db + .from(users) + .list() + .where(isNull(users.name)) + .getQueryString(); + + expect(queryString).toContain("$filter"); + expect(queryString).toContain("null"); + }); + }); describe("$orderby", () => { it("should generate $orderby for ascending order", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .orderBy("name") + .orderBy(asc(users.name)) .getQueryString(); expect(queryString).toContain("$orderby"); expect(queryString).toContain("name"); + + // without asc should also work, as it's the default + const queryString2 = db + .from(users) + .list() + .orderBy(users.name) + .getQueryString(); }); it("should generate $orderby for descending order", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .orderBy("name desc") + .orderBy(desc(users.name)) .getQueryString(); expect(queryString).toContain("$orderby"); @@ -241,53 +287,43 @@ describe("OData Query String Generation", () => { expect(queryString).toContain("desc"); }); - /** - * ESCAPE HATCH: Raw string orderBy for untyped databases - * - * This test demonstrates the legacy/escape hatch pattern where a raw string - * is passed to orderBy(). This approach works but provides NO type safety. 
- * - * ⚠️ DISCOURAGED USAGE: - * - No autocomplete for field names - * - No compile-time validation of field existence - * - Typos in field names will only fail at runtime - * - * ✅ PREFERRED USAGE (for typed databases): - * - Use tuple syntax: .orderBy(["name", "asc"]) for single field - * - Use array of tuples: .orderBy([["name", "asc"], ["age", "desc"]]) for multiple - * - Use single field name: .orderBy("name") for ascending single field - * - * See typescript.test.ts for type-safe orderBy examples. - */ - it("should support raw string orderBy as escape hatch for untyped databases", () => { - const client = createClient(); - const db = client.database("TestDB"); // No schema - untyped database + it("should allow order by with multiple fields", () => { const queryString = db - .from("Users") + .from(users) .list() - .orderBy("name, age desc") // Raw string - no type safety + .orderBy(users.name, desc(users.age)) // Raw string - no type safety .getQueryString(); expect(queryString).toContain("$orderby"); expect(queryString).toContain("name"); expect(queryString).toContain("age"); }); + + it("should not allow order by with fields from other tables", () => { + db.from(users) + .list() + // @ts-expect-error - contacts.PrimaryKey is not a valid field + .orderBy(contacts.PrimaryKey); + + // @ts-expect-error - contacts.name is not a valid field + db.from(users).list().orderBy(asc(contacts.name)); + // @ts-expect-error - contacts.name is not a valid field + db.from(users).list().orderBy(desc(contacts.name)); + // @ts-expect-error - contacts.name is not a valid field + db.from(users).list().orderBy(users.name, desc(contacts.name)); + }); }); describe("$top", () => { it("should generate $top query parameter", () => { - const client = createClient(); - const db = client.database("TestDB"); - const queryString = db.from("Users").list().top(10).getQueryString(); + const queryString = db.from(users).list().top(10).getQueryString(); expect(queryString).toContain("$top"); 
expect(queryString).toContain("10"); }); it("should generate $top with different values", () => { - const client = createClient(); - const db = client.database("TestDB"); - const queryString = db.from("Users").list().top(25).getQueryString(); + const queryString = db.from(users).list().top(25).getQueryString(); expect(queryString).toContain("$top"); expect(queryString).toContain("25"); @@ -296,18 +332,14 @@ describe("OData Query String Generation", () => { describe("$skip", () => { it("should generate $skip query parameter", () => { - const client = createClient(); - const db = client.database("TestDB"); - const queryString = db.from("Users").list().skip(20).getQueryString(); + const queryString = db.from(users).list().skip(20).getQueryString(); expect(queryString).toContain("$skip"); expect(queryString).toContain("20"); }); it("should generate $skip with zero value", () => { - const client = createClient(); - const db = client.database("TestDB"); - const queryString = db.from("Users").list().skip(0).getQueryString(); + const queryString = db.from(users).list().skip(0).getQueryString(); expect(queryString).toContain("$skip"); expect(queryString).toContain("0"); @@ -316,35 +348,29 @@ describe("OData Query String Generation", () => { describe("$expand", () => { it("should generate $expand query parameter", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .expand("orders") + .expand(contacts) .getQueryString(); expect(queryString).toContain("$expand"); - expect(queryString).toContain("orders"); + expect(queryString).toContain("contacts"); }); }); describe("$count", () => { it("should generate query with $count parameter", () => { - const client = createClient(); - const db = client.database("TestDB"); - const queryString = db.from("Users").list().count().getQueryString(); + const queryString = db.from(users).list().count().getQueryString(); 
expect(queryString).toContain("$count"); }); it("should generate $count with other query parameters", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .filter("status eq 'active'") + .where("status eq 'active'") .count() .getQueryString(); @@ -355,13 +381,11 @@ describe("OData Query String Generation", () => { describe("Combined query parameters", () => { it("should combine $select and $filter", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name", "email") - .filter("age gt 18") + .select({ name: users.name, email: users.email }) + .where("age gt 18") .getQueryString(); expect(queryString).toContain("$select"); @@ -371,13 +395,11 @@ describe("OData Query String Generation", () => { }); it("should combine $select, $filter, and $orderby", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name", "email") - .filter("status eq 'active'") + .select({ name: users.name, email: users.email }) + .where("status eq 'active'") .orderBy("name") .getQueryString(); @@ -387,10 +409,8 @@ describe("OData Query String Generation", () => { }); it("should combine $top and $skip for pagination", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() .top(10) .skip(20) @@ -403,13 +423,11 @@ describe("OData Query String Generation", () => { }); it("should combine multiple query parameters", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name", "email") - .filter("age gt 18") + .select({ name: users.name, email: users.email }) + .where("age gt 18") .orderBy("name") .top(10) .skip(0) @@ 
-423,16 +441,14 @@ describe("OData Query String Generation", () => { }); it("should combine $select, $filter, $orderby, $top, and $expand", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name", "email") - .filter("status eq 'active'") + .select({ name: users.name, email: users.email }) + .where("status eq 'active'") .orderBy("name") .top(25) - .expand("orders") + .expand(contacts) .getQueryString(); expect(queryString).toContain("$select"); @@ -445,12 +461,10 @@ describe("OData Query String Generation", () => { describe("single() mode", () => { it("should generate query string for single record", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name") + .select({ name: users.name }) .single() .getQueryString(); @@ -460,12 +474,10 @@ describe("OData Query String Generation", () => { }); it("should generate query string with single() and filter", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .filter("id eq '123'") + .where("id eq '123'") .single() .getQueryString(); @@ -476,13 +488,11 @@ describe("OData Query String Generation", () => { describe("Query string format validation", () => { it("should use & to separate multiple parameters", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name") - .filter("age gt 18") + .select({ name: users.name }) + .where("age gt 18") .top(10) .getQueryString(); @@ -492,12 +502,10 @@ describe("OData Query String Generation", () => { }); it("should URL encode special characters in values", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) 
.list() - .filter("name eq 'John & Jane'") + .where("name eq 'John & Jane'") .getQueryString(); expect(queryString).toContain("$filter"); @@ -508,21 +516,17 @@ describe("OData Query String Generation", () => { describe("list() method", () => { it("should generate query string from list() builder", () => { - const client = createClient(); - const db = client.database("TestDB"); - const queryString = db.from("Users").list().getQueryString(); + const queryString = db.from(users).list().getQueryString(); expect(queryString).toBeDefined(); expect(typeof queryString).toBe("string"); }); it("should combine list() with query parameters", () => { - const client = createClient(); - const db = client.database("TestDB"); const queryString = db - .from("Users") + .from(users) .list() - .select("name") + .select({ name: users.name }) .top(10) .getQueryString(); diff --git a/packages/fmodata/tests/record-builder-select-expand.test.ts b/packages/fmodata/tests/record-builder-select-expand.test.ts index f5001525..f507f2ee 100644 --- a/packages/fmodata/tests/record-builder-select-expand.test.ts +++ b/packages/fmodata/tests/record-builder-select-expand.test.ts @@ -14,98 +14,130 @@ import { z } from "zod/v4"; import { createMockFetch } from "./utils/mock-fetch"; import { createMockClient, - occurrences, - occurrencesWithIds, - contactsBase, - usersBase, + contacts, + users, + arbitraryTable, + invoices, } from "./utils/test-setup"; -import { defineTableOccurrence, buildOccurrences } from "../src/index"; +import { + fmTableOccurrence, + textField, + timestampField, + numberField, + containerField, + eq, +} from "@proofkit/fmodata"; describe("RecordBuilder Select/Expand", () => { const client = createMockClient(); - const db = client.database("test_db", { - occurrences: occurrences, - }); - const dbWithIds = client.database("test_db_with_ids", { - occurrences: occurrencesWithIds, - }); + const db = client.database("test_db"); // Create occurrences with different defaultSelect values for 
testing - const contactsWithSchemaSelect = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - defaultSelect: "schema", // Should select all schema fields - }); - - const contactsWithArraySelect = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - defaultSelect: ["name", "hobby", "id_user"] as const, // Specific fields - }); - - const occurrencesWithSchemaSelect = buildOccurrences({ - occurrences: [contactsWithSchemaSelect], - }); - - const occurrencesWithArraySelect = buildOccurrences({ - occurrences: [contactsWithArraySelect], - }); - - const dbWithSchemaSelect = client.database("test_db_schema_select", { - occurrences: occurrencesWithSchemaSelect, - }); - - const dbWithArraySelect = client.database("test_db_array_select", { - occurrences: occurrencesWithArraySelect, - }); + const contactsWithSchemaSelect = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + }, + { + defaultSelect: "schema", // Should select all schema fields + navigationPaths: ["users"], + }, + ); + + const contactsWithArraySelect = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), + }, + { + defaultSelect: (table) => ({ + name: table.name, + hobby: table.hobby, + id_user: table.id_user, + }), // Specific fields + navigationPaths: ["users"], + }, + ); // Create occurrences with navigation where target has different defaultSelect values - const contactsForExpandTest = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - defaultSelect: "all", // Parent table uses all - }); - - const 
usersWithSchemaSelect = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - defaultSelect: "schema", // Target table uses schema - }); - - const usersWithArraySelect = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - defaultSelect: ["name", "active"] as const, // Target table uses specific fields - }); - - const occurrencesWithExpandSchemaSelect = buildOccurrences({ - occurrences: [contactsForExpandTest, usersWithSchemaSelect], - navigation: { - contacts: ["users"], - users: ["contacts"], + const contactsForExpandTest = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + hobby: textField(), + id_user: textField(), }, - }); - - const occurrencesWithExpandArraySelect = buildOccurrences({ - occurrences: [contactsForExpandTest, usersWithArraySelect], - navigation: { - contacts: ["users"], - users: ["contacts"], + { + defaultSelect: "all", // Parent table uses all + navigationPaths: ["users"], }, - }); - - const dbWithExpandSchemaSelect = client.database("test_db_expand_schema", { - occurrences: occurrencesWithExpandSchemaSelect, - }); - - const dbWithExpandArraySelect = client.database("test_db_expand_array", { - occurrences: occurrencesWithExpandArraySelect, - }); + ); + + const usersWithSchemaSelect = fmTableOccurrence( + "users", + { + id: textField().primaryKey().readValidator(z.uuid()), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + active: numberField().readValidator(z.coerce.boolean()), + fake_field: textField(), + id_customer: textField(), + }, + { + defaultSelect: "schema", // Target table uses schema + navigationPaths: ["contacts"], + }, + ); + + const usersWithArraySelect = fmTableOccurrence( + "users", + { + id: 
textField().primaryKey().readValidator(z.uuid()), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + active: numberField().readValidator(z.coerce.boolean()), + fake_field: textField(), + id_customer: textField(), + }, + { + defaultSelect: (table) => ({ + name: table.name, + active: table.active, + }), // Target table uses specific fields + navigationPaths: ["contacts"], + }, + ); + // const dbWithExpandArraySelect = client.database("test_db_expand_array"); describe("defaultSelect on get()", () => { it("should apply defaultSelect: 'schema' fields to query string when no select is called", () => { - const queryString = dbWithSchemaSelect - .from("contacts") + const queryString = db + .from(contactsWithSchemaSelect) .get("test-uuid") .getQueryString(); @@ -124,8 +156,8 @@ describe("RecordBuilder Select/Expand", () => { }); it("should apply defaultSelect: array of fields to query string when no select is called", () => { - const queryString = dbWithArraySelect - .from("contacts") + const queryString = db + .from(contactsWithArraySelect) .get("test-uuid") .getQueryString(); @@ -141,7 +173,7 @@ describe("RecordBuilder Select/Expand", () => { }); it("should NOT apply defaultSelect when defaultSelect is 'all'", () => { - const queryString = db.from("contacts").get("test-uuid").getQueryString(); + const queryString = db.from(contacts).get("test-uuid").getQueryString(); // When defaultSelect is "all", no $select should be added // (current behavior - FileMaker returns all fields) @@ -150,10 +182,10 @@ describe("RecordBuilder Select/Expand", () => { }); it("should override defaultSelect when explicit select() is called", () => { - const queryString = dbWithSchemaSelect - .from("contacts") + const queryString = db + .from(contactsWithSchemaSelect) .get("test-uuid") - .select("name") // Explicit select should override defaultSelect + .select({ name: 
contactsWithSchemaSelect.name }) // Explicit select should override defaultSelect .getQueryString(); expect(queryString).toContain("$select=name"); @@ -167,10 +199,10 @@ describe("RecordBuilder Select/Expand", () => { it("should apply target table defaultSelect: 'schema' in expand when no callback select is called", () => { // When expanding to 'users' which has defaultSelect: "schema", // the expand should automatically include $select with all user schema fields - const queryString = dbWithExpandSchemaSelect - .from("contacts") + const queryString = db + .from(contactsWithSchemaSelect) .get("test-uuid") - .expand("users") + .expand(usersWithSchemaSelect) .getQueryString(); // The expand should include $select for the target table's schema fields @@ -186,10 +218,10 @@ describe("RecordBuilder Select/Expand", () => { it("should apply target table defaultSelect: array in expand when no callback select is called", () => { // When expanding to 'users' which has defaultSelect: ["name", "active"], // the expand should automatically include $select with those specific fields - const queryString = dbWithExpandArraySelect - .from("contacts") + const queryString = db + .from(contactsWithArraySelect) .get("test-uuid") - .expand("users") + .expand(usersWithArraySelect) .getQueryString(); // The expand should include $select for the target table's default fields @@ -204,10 +236,10 @@ describe("RecordBuilder Select/Expand", () => { it("should override target defaultSelect when callback provides explicit select", () => { // Even though users has defaultSelect: ["name", "active"], // an explicit callback select should override it - const queryString = dbWithExpandArraySelect - .from("contacts") + const queryString = db + .from(contactsWithArraySelect) .get("test-uuid") - .expand("users", (b) => b.select("id")) + .expand(users, (b: any) => b.select({ id: users.id })) .getQueryString(); // Should only have the explicitly selected field (quotes may vary based on odata-query library) 
@@ -218,10 +250,10 @@ describe("RecordBuilder Select/Expand", () => { }); it("should apply defaultSelect in expand on list() queries too", () => { - const queryString = dbWithExpandArraySelect - .from("contacts") + const queryString = db + .from(contactsWithArraySelect) .list() - .expand("users") + .expand(usersWithSchemaSelect) .getQueryString(); // The expand should include $select for the target table's default fields @@ -234,9 +266,9 @@ describe("RecordBuilder Select/Expand", () => { describe("select() method", () => { it("should generate query string with $select for single field", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name") + .select({ name: contacts.name }) .getQueryString(); expect(queryString).toBe("/contacts('test-uuid')?$select=name"); @@ -244,9 +276,13 @@ describe("RecordBuilder Select/Expand", () => { it("should generate query string with $select for multiple fields", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby", "id_user") + .select({ + name: contacts.name, + hobby: contacts.hobby, + id_user: contacts.id_user, + }) .getQueryString(); expect(queryString).toContain("$select="); @@ -257,9 +293,9 @@ describe("RecordBuilder Select/Expand", () => { it("should deduplicate selected fields", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "name", "hobby") + .select({ name: contacts.name, hobby: contacts.hobby }) .getQueryString(); // Count occurrences of "name" - should only appear once @@ -269,9 +305,9 @@ describe("RecordBuilder Select/Expand", () => { it("should narrow return type to selected fields only", () => { const recordBuilder = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby"); + .select({ name: contacts.name, hobby: contacts.hobby }); // Type test - the execute result should only have name and hobby // This is a compile-time check 
@@ -287,15 +323,19 @@ describe("RecordBuilder Select/Expand", () => { }); it("should provide type errors for non-existent fields", () => { - // @ts-expect-error - "nonexistent" is not a valid field - db.from("contacts").get("test-uuid").select("nonexistent"); + () => { + db.from(contacts) + .get("test-uuid") + // @ts-expect-error - nonexistent is not a valid column + .select({ name: contacts.nonexistent }); + }; }); it("should include selected fields in getRequestConfig URL", () => { const config = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby") + .select({ name: contacts.name, hobby: contacts.hobby }) .getRequestConfig(); expect(config.url).toContain("$select="); @@ -307,9 +347,9 @@ describe("RecordBuilder Select/Expand", () => { describe("expand() method", () => { it("should generate query string with simple $expand", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users") + .expand(users) .getQueryString(); expect(queryString).toBe("/contacts('test-uuid')?$expand=users"); @@ -317,9 +357,11 @@ describe("RecordBuilder Select/Expand", () => { it("should generate query string with $expand and nested $select", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.select("name", "active")) + .expand(users, (b: any) => + b.select({ name: users.name, active: users.active }), + ) .getQueryString(); expect(queryString).toBe( @@ -328,7 +370,7 @@ describe("RecordBuilder Select/Expand", () => { }); it("should provide autocomplete for known relations", () => { - const recordBuilder = db.from("contacts").get("test-uuid"); + const recordBuilder = db.from(contacts).get("test-uuid"); // The expand parameter should suggest "users" | (string & {}) expectTypeOf(recordBuilder.expand) @@ -337,33 +379,34 @@ describe("RecordBuilder Select/Expand", () => { }); it("should type callback builder to target table schema", () => { - 
db.from("contacts") + db.from(contacts) .get("test-uuid") - .expand("users", (builder) => { + .expand(users, (builder: any) => { // builder.select should only accept fields from users table expectTypeOf(builder.select).parameter(0).not.toEqualTypeOf(); - return builder.select("name", "active"); + return builder.select({ name: users.name, active: users.active }); }); }); - it("should allow arbitrary string relations", () => { + it("should not allow arbitrary string relations", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("arbitrary_relation") + // @ts-expect-error - arbitraryTable is not a valid expand target + .expand(arbitraryTable) .getQueryString(); - expect(queryString).toBe( - "/contacts('test-uuid')?$expand=arbitrary_relation", + expect(queryString).toContain( + "/contacts('test-uuid')?$expand=arbitrary_table", ); }); it("should support $filter in expand callback", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.filter({ active: true })) + .expand(users, (b: any) => b.where(eq(users.active, true))) .getQueryString(); expect(queryString).toContain("$expand=users($filter=active"); @@ -371,9 +414,9 @@ describe("RecordBuilder Select/Expand", () => { it("should support $orderby in expand callback", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.orderBy("name")) + .expand(users, (b: any) => b.orderBy("name")) .getQueryString(); expect(queryString).toContain("$expand=users($orderby=name"); @@ -381,9 +424,9 @@ describe("RecordBuilder Select/Expand", () => { it("should support $top in expand callback", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.top(5)) + .expand(users, (b: any) => b.top(5)) .getQueryString(); expect(queryString).toContain("$expand=users($top=5"); @@ -391,9 +434,9 @@ describe("RecordBuilder 
Select/Expand", () => { it("should support $skip in expand callback", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.skip(10)) + .expand(users, (b: any) => b.skip(10)) .getQueryString(); expect(queryString).toContain("$expand=users($skip=10"); @@ -402,12 +445,14 @@ describe("RecordBuilder Select/Expand", () => { it("should support nested expands", () => { // users -> contacts (circular navigation from setup) const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => + .expand(users, (b: any) => b - .select("name") - .expand("contacts", (nested) => nested.select("name")), + .select({ name: users.name }) + .expand(contacts, (nested: any) => + nested.select({ name: contacts.name }), + ), ) .getQueryString(); @@ -418,14 +463,14 @@ describe("RecordBuilder Select/Expand", () => { it("should support multiple expands via chaining", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.select("name")) - .expand("another_relation") + .expand(users, (b: any) => b.select({ name: users.name })) + .expand(invoices) .getQueryString(); expect(queryString).toBe( - "/contacts('test-uuid')?$expand=users($select=name),another_relation", + "/contacts('test-uuid')?$expand=users($select=name),invoices", ); }); }); @@ -433,10 +478,10 @@ describe("RecordBuilder Select/Expand", () => { describe("select() + expand() combined", () => { it("should generate query string with both $select and $expand", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby") - .expand("users", (b) => b.select("name")) + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b: any) => b.select({ name: users.name })) .getQueryString(); expect(queryString).toContain("$select=name,hobby"); @@ -445,22 +490,17 @@ describe("RecordBuilder Select/Expand", () => { it("should 
return properly typed result with both select and expand", () => { const recordBuilder = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby") - .expand("users", (b) => b.select("name", "active")); - - // Type test - result should have name, hobby from contact and users array - expectTypeOf(recordBuilder.execute).returns.resolves.toMatchTypeOf<{ - data: - | { - name: string | null; - hobby: string | null; - users: { name: string | null; active: boolean }[]; - } - | undefined; - error: any; - }>(); + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b: any) => + b.select({ name: users.name, active: users.active }), + ); + + async () => { + const { data, error } = await recordBuilder.execute(); + data?.users.map((user) => user.CreatedBy); + }; }); }); @@ -481,9 +521,9 @@ describe("RecordBuilder Select/Expand", () => { }; const result = await db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby") + .select({ name: contacts.name, hobby: contacts.hobby }) .execute({ fetchHandler: createMockFetch(mockResponse), }); @@ -524,9 +564,11 @@ describe("RecordBuilder Select/Expand", () => { }; const result = await db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.select("name", "active")) + .expand(users, (b: any) => + b.select({ name: users.name, active: users.active }), + ) .execute({ fetchHandler: createMockFetch(mockResponse), }); @@ -555,9 +597,9 @@ describe("RecordBuilder Select/Expand", () => { }; const result = await db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby") + .select({ name: contacts.name, hobby: contacts.hobby }) .execute({ fetchHandler: createMockFetch(mockResponse), }); @@ -584,9 +626,9 @@ describe("RecordBuilder Select/Expand", () => { }; const result = await db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby") + .select({ name: contacts.name, hobby: contacts.hobby }) 
.execute({ fetchHandler: createMockFetch(mockResponse), includeODataAnnotations: true, @@ -603,9 +645,9 @@ describe("RecordBuilder Select/Expand", () => { it("should work independently of select/expand", () => { // getSingleField should work as before, returning just the field value const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .getSingleField("name") + .getSingleField(contacts.name) .getQueryString(); // getSingleField adds /fieldName to the URL, not $select @@ -617,10 +659,10 @@ describe("RecordBuilder Select/Expand", () => { describe("getRequestConfig()", () => { it("should include query params in URL", () => { const config = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name") - .expand("users") + .select({ name: contacts.name }) + .expand(users) .getRequestConfig(); expect(config.method).toBe("GET"); @@ -633,16 +675,18 @@ describe("RecordBuilder Select/Expand", () => { it("should support select + filter + orderBy + top + nested expand", () => { // Using contacts -> users -> contacts (circular navigation from setup) const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .select("name", "hobby") - .expand("users", (b) => + .select({ name: contacts.name, hobby: contacts.hobby }) + .expand(users, (b: any) => b - .select("name", "active") - .filter({ active: true }) - .orderBy("name") + .select({ name: users.name, active: users.active }) + .where(eq(users.active, true)) + .orderBy(users.name) .top(10) - .expand("contacts", (nested) => nested.select("name")), + .expand(contacts, (nested: any) => + nested.select({ name: contacts.name }), + ), ) .getQueryString(); @@ -657,18 +701,134 @@ describe("RecordBuilder Select/Expand", () => { it("should support multiple expands with different options", () => { const queryString = db - .from("contacts") + .from(contacts) .get("test-uuid") - .expand("users", (b) => b.select("name").filter({ active: true })) - .expand("another_relation", (b) => 
b.select("some_field" as any).top(5)) + .expand(users, (b: any) => + b.select({ name: users.name }).where(eq(users.active, true)), + ) + .expand(invoices, (b: any) => + b.select({ invoiceNumber: invoices.invoiceNumber }).top(5), + ) .getQueryString(); - expect(queryString).toContain( - "users($select=name;$filter=active eq true)", - ); - expect(queryString).toContain( - "another_relation($select=some_field;$top=5)", - ); + expect(queryString).toContain("users($select=name;$filter=active eq 1)"); + expect(queryString).toContain("invoices($select=invoiceNumber;$top=5)"); + }); + }); + + describe("Container Field Exclusion", () => { + it("should exclude container fields from defaultSelect: schema", async () => { + const mockResponse = { + url: "https://example.com/test", + method: "GET", + status: 200, + headers: { "content-type": "application/json;charset=utf-8" }, + response: { + "@odata.context": + "https://example.com/fmi/odata/v4/test_db/$metadata#contacts/$entity", + PrimaryKey: "test-uuid", + CreationTimestamp: "2025-01-01T00:00:00Z", + CreatedBy: "admin", + ModificationTimestamp: "2025-01-01T00:00:00Z", + ModifiedBy: "admin", + name: "John Doe", + hobby: "Reading", + id_user: "user-123", + // Note: image field should NOT be included even though it's in the schema + }, + }; + + const result = await db + .from(contactsWithSchemaSelect) + .get("test-uuid") + .execute({ + fetchHandler: createMockFetch(mockResponse), + }); + + expect(result.data).toBeDefined(); + expect(result.error).toBeUndefined(); + + // Container field should not appear in the result type or query + const queryString = db + .from(contactsWithSchemaSelect) + .get("test-uuid") + .getQueryString(); + + // Should contain non-container fields + expect(queryString).toContain("$select="); + // Should NOT contain the image field + expect(queryString).not.toContain("image"); + }); + + it("should reject container field selection at compile time", () => { + // Type test - this should produce a compile 
error + expectTypeOf(() => { + // @ts-expect-error - container fields cannot be selected + db.from(contacts).get("test-uuid").select({ image: contacts.image }); + }).toBeFunction(); + }); + + it("should allow getSingleField() to access container fields", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .getSingleField(contacts.image) + .getQueryString(); + + expect(queryString).toBe("/contacts('test-uuid')/image"); + }); + + it("should exclude container fields from list queries with defaultSelect: schema", () => { + const queryString = db + .from(contactsWithSchemaSelect) + .list() + .getQueryString(); + + // Should have a select parameter + expect(queryString).toContain("$select="); + // Should NOT contain the image field + expect(queryString).not.toContain("image"); + // Should contain other fields + expect(queryString).toContain("name"); + }); + + it("should reject container field selection in list queries at compile time", () => { + // Type test - this should produce a compile error + expectTypeOf(() => { + // @ts-expect-error - container fields cannot be selected + db.from(contacts).list().select({ image: contacts.image }); + }).toBeFunction(); + }); + + it("should allow selecting non-container fields normally", () => { + const queryString = db + .from(contacts) + .get("test-uuid") + .select({ name: contacts.name, hobby: contacts.hobby }) + .getQueryString(); + + expect(queryString).toContain("$select=name,hobby"); + expect(queryString).not.toContain("image"); + }); + + it("should allow non-container fields in expanded relations", () => { + // Non-container fields should work fine in expanded relations + const queryString = db + .from(contacts) + .get("test-uuid") + .expand(users, (b: any) => b.select({ name: users.name })) + .getQueryString(); + + expect(queryString).toContain("users($select=name)"); + + // Verify main select also works with non-container fields + const queryString2 = db + .from(contacts) + .get("test-uuid") + 
.select({ name: contacts.name, hobby: contacts.hobby }) + .getQueryString(); + + expect(queryString2).toContain("$select=name,hobby"); }); }); }); diff --git a/packages/fmodata/tests/sanitize-json.test.ts b/packages/fmodata/tests/sanitize-json.test.ts index 487ee1ce..fe55b285 100644 --- a/packages/fmodata/tests/sanitize-json.test.ts +++ b/packages/fmodata/tests/sanitize-json.test.ts @@ -9,8 +9,8 @@ import { describe, it, expect } from "vitest"; import { sanitizeFileMakerJson, safeJsonParse, -} from "../src/client/sanitize-json"; -import { ResponseParseError } from "../src/errors"; +} from "@proofkit/fmodata/client/sanitize-json"; +import { ResponseParseError } from "@proofkit/fmodata/errors"; describe("sanitizeFileMakerJson", () => { describe("basic sanitization", () => { diff --git a/packages/fmodata/tests/schema-manager.test.ts b/packages/fmodata/tests/schema-manager.test.ts index 22de2739..1adfa80e 100644 --- a/packages/fmodata/tests/schema-manager.test.ts +++ b/packages/fmodata/tests/schema-manager.test.ts @@ -17,7 +17,7 @@ import path from "path"; import { describe, it, expect, afterEach } from "vitest"; import { config } from "dotenv"; -import { FMServerConnection } from "../src/index"; +import { FMServerConnection } from "@proofkit/fmodata"; import type { Field, StringField, @@ -26,7 +26,7 @@ import type { TimeField, TimestampField, ContainerField, -} from "../src/index"; +} from "@proofkit/fmodata"; config({ path: path.resolve(__dirname, "../.env.local") }); diff --git a/packages/fmodata/tests/scripts.test.ts b/packages/fmodata/tests/scripts.test.ts index 4931365a..f669a6b8 100644 --- a/packages/fmodata/tests/scripts.test.ts +++ b/packages/fmodata/tests/scripts.test.ts @@ -4,70 +4,17 @@ * Tests for running FileMaker scripts via the OData API. 
*/ -import { describe, it, expect, expectTypeOf } from "vitest"; +import { describe, it, expectTypeOf } from "vitest"; import { z } from "zod/v4"; -import { defineBaseTable, defineTableOccurrence, buildOccurrences } from "../src/index"; import { jsonCodec } from "./utils/helpers"; import { createMockClient } from "./utils/test-setup"; -import { InferSchemaType } from "../src/types"; describe("scripts", () => { const client = createMockClient(); - const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - hobby: z.string().optional(), - }, - idField: "id", - }); - - const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), - }, - idField: "id", - }); - - const _testTO = defineTableOccurrence({ - name: "test", - baseTable: defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - }, - idField: "id", - }), - }); - - // Phase 1: Define base TOs (without navigation) - const _contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - }); - - const _usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - }); - - // Phase 2: Build final TOs with navigation - const [contactsTO, usersTO, testTO] = buildOccurrences({ - occurrences: [_contactsTO, _usersTO, _testTO], - navigation: { - contacts: ["users"], - users: ["contacts", "test"], - }, - }); - it("should handle expands", () => { expectTypeOf(client.listDatabaseNames).returns.resolves.toBeArray(); - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); expectTypeOf(db.listTableNames).returns.resolves.toBeArray(); @@ -81,9 +28,7 @@ describe("scripts", () => { }); it("should allow script param", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); () => { // don't actual run these calls, we're just checking the types @@ -109,9 +54,7 
@@ describe("scripts", () => { it("should throw a type error if script name is invalid string", () => { // OData doesn't support script names with special characters (for example, @, &, /) or script names beginning with a number. - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); () => { // don't actual run these calls, we're just checking the types @@ -127,9 +70,7 @@ describe("scripts", () => { }); it("should validate/transform script result if schema provided", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); () => { // don't actual run these calls, we're just checking the types diff --git a/packages/fmodata/tests/tsconfig.build.json b/packages/fmodata/tests/tsconfig.build.json new file mode 100644 index 00000000..b90b2dc4 --- /dev/null +++ b/packages/fmodata/tests/tsconfig.build.json @@ -0,0 +1,37 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + /* Strict any checking for tests */ + "noImplicitAny": true, + "strictNullChecks": true, + "strictFunctionTypes": true, + "strictBindCallApply": true, + "strictPropertyInitialization": true, + "noImplicitThis": true, + "alwaysStrict": true, + + /* Disallow explicit any (using @ts-expect-error for intentional any testing) */ + /* Note: TypeScript doesn't have a built-in noExplicitAny, but we can use eslint */ + + /* Path mappings for package name imports - point to dist for build testing */ + "baseUrl": "..", + "paths": { + "@proofkit/fmodata": ["./dist/esm"], + "@proofkit/fmodata/*": ["./dist/esm/*"] + }, + + /* Ensure node_modules types are accessible */ + "skipLibCheck": false, + "moduleResolution": "Bundler", + + /* Include test files */ + "rootDir": "..", + "outDir": "../dist" + }, + "include": [ + "../dist/esm/**/*.d.ts", + "../node_modules/@fetchkit/**/*.d.ts", + "./**/*.ts" + ], + "exclude": ["../src/**/*", "../dist/**/*.js", 
"../dist/**/*.js.map"] +} diff --git a/packages/fmodata/tests/tsconfig.json b/packages/fmodata/tests/tsconfig.json index 01fd202f..b738ae13 100644 --- a/packages/fmodata/tests/tsconfig.json +++ b/packages/fmodata/tests/tsconfig.json @@ -13,6 +13,13 @@ /* Disallow explicit any (using @ts-expect-error for intentional any testing) */ /* Note: TypeScript doesn't have a built-in noExplicitAny, but we can use eslint */ + /* Path mappings for package name imports */ + "baseUrl": "..", + "paths": { + "@proofkit/fmodata": ["./src"], + "@proofkit/fmodata/*": ["./src/*"] + }, + /* Include test files */ "rootDir": "..", "outDir": "../dist" diff --git a/packages/fmodata/tests/typescript.test.ts b/packages/fmodata/tests/typescript.test.ts index 98d61407..5b286569 100644 --- a/packages/fmodata/tests/typescript.test.ts +++ b/packages/fmodata/tests/typescript.test.ts @@ -21,34 +21,24 @@ import { describe, expect, it, expectTypeOf, beforeEach } from "vitest"; import { z } from "zod/v4"; import { - defineBaseTable, - defineTableOccurrence, - buildOccurrences, + fmTableOccurrence, + textField, + numberField, FMServerConnection, -} from "../src/index"; + FMTable, + getTableColumns, + eq, +} from "@proofkit/fmodata"; import { createMockFetch } from "./utils/mock-fetch"; -import { - createMockClient, - occurrences, - occurrencesWithIds, -} from "./utils/test-setup"; +import { createMockClient, contacts, users } from "./utils/test-setup"; describe("fmodata", () => { - it("should be defined", () => { - expect(true).toBe(true); - }); - describe("API ergonomics", () => { - let client: FMServerConnection; - let db: ReturnType; - - beforeEach(() => { - client = createMockClient(); - db = client.database("Contacts"); - }); + const client = createMockClient(); + const db = client.database("TestDB"); it("should support list() with query chaining", () => { - const table = db.from("Contacts"); + const table = db.from(contacts); const listBuilder = table.list(); expect(listBuilder).toBeDefined(); @@ 
-56,7 +46,7 @@ describe("fmodata", () => { }); it("should support get() for single record retrieval", () => { - const table = db.from("Contacts"); + const table = db.from(contacts); const getBuilder = table.get("my-uuid"); expect(getBuilder).toBeDefined(); @@ -64,60 +54,71 @@ describe("fmodata", () => { }); it("should support getSingleField() API", () => { - const table = db.from("Contacts"); - const singleFieldBuilder = table.get("my-uuid").getSingleField("address"); + const table = db.from(contacts); + const singleFieldBuilder = table + .get("my-uuid") + .getSingleField(contacts.name); expect(singleFieldBuilder).toBeDefined(); expect(singleFieldBuilder.getRequestConfig).toBeDefined(); }); it("should support select() for returning arrays of records", () => { - const table = db.from("Contacts"); - const selectBuilder = table.list().select("email", "city"); + const table = db.from(contacts); + const selectBuilder = table + .list() + .select({ name: contacts.name, hobby: contacts.hobby }); expect(selectBuilder).toBeDefined(); expect(selectBuilder.getQueryString).toBeDefined(); }); it("should support single() modifier on select()", () => { - const table = db.from("Contacts"); - const singleSelectBuilder = table.list().select("email", "city").single(); + const table = db.from(contacts); + const singleSelectBuilder = table + .list() + .select({ name: contacts.name, hobby: contacts.hobby }) + .single(); expect(singleSelectBuilder).toBeDefined(); expect(singleSelectBuilder.getQueryString).toBeDefined(); }); it("should generate query strings correctly", () => { - const table = db.from("Contacts"); - const queryString = table.list().select("email", "city").getQueryString(); + const table = db.from(contacts); + const queryString = table + .list() + .select({ name: contacts.name, hobby: contacts.hobby }) + .getQueryString(); expect(queryString).toBeDefined(); expect(typeof queryString).toBe("string"); }); it("should infer field names for select() based on schema", () => { - 
const usersBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - email: z.string(), - age: z.number(), - }, - idField: "id", - }); - - const users = defineTableOccurrence({ - name: "Users", - baseTable: usersBase, + const users = fmTableOccurrence("Users", { + id: textField().primaryKey(), + name: textField(), + email: textField(), + age: numberField(), }); - const dbTyped = client.database("TestDB", { occurrences: [users] }); - const entitySet = dbTyped.from("Users"); + const db = client.database("TestDB"); + const entitySet = db.from(users); // These should have autocomplete for "id", "name", "email", "age" - const query1 = entitySet.list().select("id", "name"); - const query2 = entitySet.list().select("email", "age"); - const query3 = entitySet.list().select("id", "name", "email", "age"); + const query1 = entitySet + .list() + .select({ id: users.id, name: users.name }); + const query2 = entitySet + .list() + .select({ email: users.email, age: users.age }); + const query3 = entitySet.list().select({ + id: users.id, + name: users.name, + email: users.email, + age: users.age, + }); expect(query1).toBeDefined(); expect(query2).toBeDefined(); @@ -125,105 +126,95 @@ describe("fmodata", () => { // These should be TypeScript errors - fields not in schema const _typeChecks = () => { - // @ts-expect-error - field not in schema + // @ts-expect-error - should pass an object entitySet.list().select("invalidField"); - - entitySet.list().select( - "name", - // @ts-expect-error - field not in schema - "nonexistentField", - ); - - entitySet.list().select( - // @ts-expect-error - field not in schema - "foo", - // even though these are also invalid, it's OK that they don't error because the first field is already showing the problem - "bar", - "baz", - ); + // @ts-expect-error - should pass an object + entitySet.list().select(""); + // @ts-expect-error - should pass an object with column references + entitySet.list().select({ invalidField: true }); + 
entitySet.list().select({ + age: users.age, + // @ts-expect-error - column must be from the correct table + name: contacts.name, + }); }; void _typeChecks; }); it("should infer field names for select() with entity IDs", () => { - const productsBase = defineBaseTable({ - schema: { - productId: z.string(), - productName: z.string(), - price: z.number(), - category: z.string(), - inStock: z.boolean(), + const products = fmTableOccurrence( + "Products", + { + productId: textField() + .primaryKey() + .readOnly() + .entityId("FMFID:1000001"), + productName: textField().entityId("FMFID:1000002"), + price: numberField().entityId("FMFID:1000003"), + category: textField().entityId("FMFID:1000004"), + inStock: numberField() + .readValidator(z.coerce.boolean()) + .entityId("FMFID:1000005"), }, - idField: "productId", - readOnly: ["productId"], - fmfIds: { - productId: "FMFID:1000001", - productName: "FMFID:1000002", - price: "FMFID:1000003", - category: "FMFID:1000004", - inStock: "FMFID:1000005", + { + entityId: "FMTID:2000001", }, - }); - - const products = defineTableOccurrence({ - name: "Products", - baseTable: productsBase, - fmtId: "FMTID:2000001", - }); + ); - const dbTyped = client.database("TestDB", { - occurrences: [products] as const, - }); - const entitySet = dbTyped.from("Products"); + const entitySet = db.from(products); // Type inspection to debug the issue - type BaseTableType = typeof productsBase; - // ^? Should show BaseTable with schema type OccurrenceType = typeof products; - // ^? Should show TableOccurrence with BaseTable + // ^? Should show FMTable with fields type EntitySetType = typeof entitySet; // ^? 
Should show EntitySet with schema // These should have autocomplete for "productId", "productName", "price", "category", "inStock" - const query1 = entitySet.list().select("productId", "productName"); + const query1 = entitySet.list().select({ + productId: products.productId, + productName: products.productName, + }); const listQuery = entitySet.list(); type ListQueryType = typeof listQuery; // ^? First param should be schema type, not never type Autocomplete1 = Parameters[0]; // ^? - const query2 = entitySet.list().select("price", "category", "inStock"); - const query3 = entitySet - .list() - .select("productId", "productName", "price", "category", "inStock"); + const query2 = entitySet.list().select({ + price: products.price, + category: products.category, + inStock: products.inStock, + }); + const query3 = entitySet.list().select({ + productId: products.productId, + productName: products.productName, + price: products.price, + category: products.category, + inStock: products.inStock, + }); expect(query1).toBeDefined(); expect(query2).toBeDefined(); expect(query3).toBeDefined(); - // These should be TypeScript errors - fields not in schema (same as regular BaseTable) + // These should be TypeScript errors - fields not in schema const _typeChecks = () => { - // @ts-expect-error - field not in schema + // @ts-expect-error - should pass an object entitySet.list().select("invalidField"); - - entitySet.list().select( - "productName", - // @ts-expect-error - field not in schema - "nonexistentField", - ); - - entitySet.list().select( - // @ts-expect-error - field not in schema - "foo", - // even though these are also invalid, it's OK that they don't error because the first field is already showing the problem - "bar", - "baz", - ); + // @ts-expect-error - should pass an object + entitySet.list().select(""); + // @ts-expect-error - should pass an object with column references + entitySet.list().select({ invalidField: true }); + entitySet.list().select({ + anyName: 
products.productName, + // @ts-expect-error - column must be from the correct table + name: contacts.name, + }); }; void _typeChecks; }); it("should not allow getQueryString() on EntitySet directly", () => { - const entitySet = db.from("Users"); + const entitySet = db.from(users); // TypeScript should error if trying to call getQueryString() directly on EntitySet // You must first call a method like list(), select(), filter(), etc. to get a QueryBuilder @@ -244,45 +235,33 @@ describe("fmodata", () => { const client = createMockClient(); it("should create BaseTable and TableOccurrence", () => { - const baseTable = defineBaseTable({ - schema: { - id: z.number(), - name: z.string(), - email: z.string(), - }, - idField: "id", + const tableOcc = fmTableOccurrence("Users", { + id: numberField().primaryKey(), + name: textField(), + email: textField(), }); - const tableOcc = defineTableOccurrence({ - name: "Users", - baseTable, - }); - - expect(tableOcc.name).toBe("Users"); - expect(tableOcc.baseTable).toBe(baseTable); - expect(tableOcc.baseTable.schema).toBeDefined(); - expect(tableOcc.baseTable.idField).toBe("id"); + // Check that the table has the expected name via Symbol + expect((tableOcc as any)[FMTable.Symbol.Name]).toBe("Users"); + expect((tableOcc as any)[FMTable.Symbol.Schema]).toBeDefined(); + expect((tableOcc as any)[FMTable.Symbol.BaseTableConfig].idField).toBe( + "id", + ); }); it("should use TableOccurrence with database.from()", () => { - const baseTable = defineBaseTable({ - schema: { - id: z.number(), - name: z.string(), - email: z.string(), - }, - idField: "id", + const users = fmTableOccurrence("Users", { + id: numberField().primaryKey(), + name: textField(), + email: textField(), }); - const users = defineTableOccurrence({ - name: "Users", - baseTable, - }); - - const db = client.database("TestDB", { occurrences: [users] }); - const entitySet = db.from("Users"); + const db = client.database("TestDB"); + const entitySet = db.from(users); - const 
queryBuilder = entitySet.list().select("id", "name"); + const queryBuilder = entitySet + .list() + .select({ id: users.id, name: users.name }); expect(queryBuilder).toBeDefined(); expect(queryBuilder.getQueryString()).toContain("$select"); @@ -292,141 +271,70 @@ describe("fmodata", () => { }); it("should allow table occurrences to be reused across different contexts", () => { - const baseTable = defineBaseTable({ - schema: { - id: z.number(), - name: z.string(), - }, - idField: "id", - }); - - const products = defineTableOccurrence({ - name: "Products", - baseTable, + const products = fmTableOccurrence("Products", { + id: numberField().primaryKey(), + name: textField(), }); const client1 = createMockClient(); const client2 = createMockClient(); - const db1 = client1.database("DB1", { occurrences: [products] }); - const db2 = client2.database("DB2", { occurrences: [products] }); + const db1 = client1.database("DB1"); + const db2 = client2.database("DB2"); - const entitySet1 = db1.from("Products"); - const entitySet2 = db2.from("Products"); + const entitySet1 = db1.from(products); + const entitySet2 = db2.from(products); expect(entitySet1.get("1").getRequestConfig().url).toContain("Products"); expect(entitySet2.get("1").getRequestConfig().url).toContain("Products"); }); - it("should support navigation properties with buildOccurrences", () => { - const usersBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - email: z.string(), + it("should support navigation properties with navigationPaths", () => { + const users = fmTableOccurrence( + "Users", + { + id: textField().primaryKey(), + name: textField(), + email: textField(), }, - idField: "id", - }); - - const ordersBase = defineBaseTable({ - schema: { - orderId: z.string(), - userId: z.string(), - total: z.number(), + { + navigationPaths: ["Orders"], }, - idField: "orderId", - }); - - const _users = defineTableOccurrence({ - name: "Users" as const, - baseTable: usersBase, - }); - - const _orders 
= defineTableOccurrence({ - name: "Orders" as const, - baseTable: ordersBase, - }); + ); - const [users, orders] = buildOccurrences({ - occurrences: [_users, _orders], - navigation: { - Users: ["Orders"], - Orders: ["Users"], + const orders = fmTableOccurrence( + "Orders", + { + orderId: textField().primaryKey(), + userId: textField(), + total: numberField(), }, - }); + { + navigationPaths: ["Users"], + }, + ); - expect(users.navigation.Orders).toBeDefined(); - expect(orders.navigation.Users).toBeDefined(); + expect((users as any)[FMTable.Symbol.NavigationPaths]).toContain( + "Orders", + ); + expect((orders as any)[FMTable.Symbol.NavigationPaths]).toContain( + "Users", + ); }); it("should support base table without idField", () => { - const categoriesBase = defineBaseTable({ - schema: { - categoryId: z.string(), - name: z.string(), - description: z.string(), - }, - // idField is undefined - should be valid - }); - - const categories = defineTableOccurrence({ - name: "Categories", - baseTable: categoriesBase, + const categories = fmTableOccurrence("Categories", { + categoryId: textField(), + name: textField(), + description: textField(), + // No primaryKey() - idField is undefined }); - expect(categories.name).toBe("Categories"); - expect(categories.baseTable.idField).toBeUndefined(); - expect(categories.baseTable.schema).toBeDefined(); - }); - }); - - describe("Untyped queries", () => { - const client = createMockClient(); - const db = client.database("TestDB"); - - it("should support untyped queries without occurrences", () => { - const entitySet = db.from("AnyTable"); - expect(entitySet).toBeDefined(); - - const queryBuilder = entitySet.list().select("field1", "field2"); - expect(queryBuilder).toBeDefined(); - expect(queryBuilder.getQueryString()).toContain("$select"); - - const recordBuilder = entitySet.get("123"); - expect(recordBuilder).toBeDefined(); - expect(recordBuilder.getRequestConfig().url).toContain("AnyTable"); - - async () => { - // just checking 
types, don't execute - const result = await queryBuilder.execute(); - - const singleResult = result.data![0]!; - // @ts-expect-error - should not be on the object - singleResult["@id"]; - // @ts-expect-error - should not be on the object - singleResult["@editLink"]; - - expectTypeOf(singleResult).not.toExtend<{ - "@id": string; - "@editLink": string; - }>(); - }; - - async () => { - // just checking types, don't execute - const result = await queryBuilder.execute({ - includeODataAnnotations: true, - }); - - const singleResult = result.data![0]!; - singleResult["@id"]; // @ts should not error this time - singleResult["@editLink"]; // @ts should not error this time - - expectTypeOf(singleResult).toExtend<{ - "@id": string; - "@editLink": string; - }>(); - }; + expect((categories as any)[FMTable.Symbol.Name]).toBe("Categories"); + expect( + (categories as any)[FMTable.Symbol.BaseTableConfig].idField, + ).toBeUndefined(); + expect((categories as any)[FMTable.Symbol.Schema]).toBeDefined(); }); }); @@ -449,22 +357,16 @@ describe("fmodata", () => { }, }); - const usersTO = defineTableOccurrence({ - name: "Users", - baseTable: defineBaseTable({ - schema: { - id: z.number(), - name: z.string(), - active: z.coerce.boolean(), - activeHuman: z.enum(["active", "inactive"]), - }, - idField: "id", - }), + const usersTO = fmTableOccurrence("Users", { + id: numberField().primaryKey(), + name: textField().notNull(), + active: numberField().readValidator(z.coerce.boolean()).notNull(), + activeHuman: textField().readValidator(z.enum(["active", "inactive"])), }); - const db = client.database("TestDB", { occurrences: [usersTO] }); - const users = db.from("Users"); - const result = await users.list().execute(); + const db = client.database("TestDB"); + const usersQuery = db.from(usersTO); + const result = await usersQuery.list().execute(); if (!result.data || !result.data[0]) { console.error(result); @@ -480,6 +382,25 @@ describe("fmodata", () => { "active" | "inactive" >(); + const 
result2 = await usersQuery + .list() + .select(getTableColumns(usersTO)) + .execute(); + + if (!result2.data || !result2.data[0]) { + console.error(result); + throw new Error("Expected at least one result"); + } + + const firstResult2 = result2.data[0]; + + expectTypeOf(firstResult2.name).toEqualTypeOf(); + expectTypeOf(firstResult2.active).toEqualTypeOf(); + expect(firstResult2.active).toBe(false); + expectTypeOf(firstResult2.activeHuman).toEqualTypeOf< + "active" | "inactive" + >(); + expect(result).toBeDefined(); expect(result.data).toBeDefined(); expect(result.data?.length).toBe(1); @@ -505,12 +426,10 @@ describe("fmodata", () => { it("should support single field orderBy with default ascending", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12"); // ✅ Single field name - defaults to ascending - const query = db.from("users").list().orderBy("name"); + const query = db.from(users).list().orderBy("name"); expect(query).toBeDefined(); expect(query.getQueryString()).toContain("$orderby"); @@ -523,14 +442,12 @@ describe("fmodata", () => { it("should support tuple syntax for single field with explicit direction", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12"); // ✅ Tuple syntax: [fieldName, direction] // Second value autocompletes to "asc" | "desc" ONLY - const ascQuery = db.from("users").list().orderBy(["name", "asc"]); - const descQuery = db.from("users").list().orderBy(["id", "desc"]); + const ascQuery = db.from(users).list().orderBy(["name", "asc"]); + const descQuery = db.from(users).list().orderBy(["id", "desc"]); expect(ascQuery.getQueryString()).toContain("$orderby"); expect(ascQuery.getQueryString()).toBe( @@ -548,39 +465,35 @@ describe("fmodata", () => { it("should support tuple syntax with entity IDs 
and transform field names to FMFIDs", () => { const client = createMockClient(); - const db = client.database("test.fmp12", { - occurrences: occurrencesWithIds, - }); + const db = client.database("test.fmp12"); // ✅ Tuple syntax: [fieldName, direction] // Field names are transformed to FMFIDs in the query string // Table name is also transformed to FMTID when using entity IDs - const ascQuery = db.from("users").list().orderBy(["name", "asc"]); - const descQuery = db.from("users").list().orderBy(["id", "desc"]); + const ascQuery = db.from(users).list().orderBy(["name", "asc"]); + const descQuery = db.from(users).list().orderBy(["id", "desc"]); expect(ascQuery.getQueryString()).toContain("$orderby"); expect(ascQuery.getQueryString()).toBe( - "/FMTID:1065093?$orderby=FMFID:6 asc&$top=1000", + "/users?$orderby=name asc&$top=1000", ); expect(descQuery.getQueryString()).toContain("$orderby"); expect(descQuery.getQueryString()).toBe( - "/FMTID:1065093?$orderby=FMFID:1 desc&$top=1000", + "/users?$orderby=id desc&$top=1000", ); // ✅ Second value must be "asc" or "desc" - field names are rejected // @ts-expect-error - "name" is not a valid direction - db.from("users").list().orderBy(["name", "name"]); + db.from(users).list().orderBy(["name", "name"]); }); it("should support array of tuples for multiple fields", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12"); // ✅ Array of tuples for multiple fields with explicit directions const query = db - .from("users") + .from(users) .list() .orderBy([ ["name", "asc"], @@ -593,15 +506,13 @@ describe("fmodata", () => { it("should chain orderBy with other query methods", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12"); const query = db - .from("users") + .from(users) .list() - 
.select("name", "id", "active") - .filter({ active: { eq: true } }) + .select({ name: users.name, id: users.id, active: users.active }) + .where(eq(users.active, true)) .orderBy(["name", "asc"]) .top(10) .skip(0); @@ -615,17 +526,6 @@ describe("fmodata", () => { expect(queryString).toContain("$skip"); }); - it("should allow raw string orderBy for untyped databases (escape hatch)", () => { - const client = createMockClient(); - const untypedDb = client.database("TestDB"); // No schema - - // For untyped databases, string passthrough is allowed as escape hatch - const query = untypedDb.from("AnyTable").list().orderBy("someField desc"); - - expect(query.getQueryString()).toContain("$orderby"); - expect(query.getQueryString()).toContain("someField"); - }); - /** * Type error tests - validates compile-time type checking for orderBy. * @@ -636,9 +536,7 @@ describe("fmodata", () => { */ it("should reject invalid usage at compile time", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); + const db = client.database("fmdapi_test.fmp12"); const _typeChecks = () => { // ✅ Invalid field name is caught diff --git a/packages/fmodata/tests/update.test.ts b/packages/fmodata/tests/update.test.ts index 51ed4b57..d42410a6 100644 --- a/packages/fmodata/tests/update.test.ts +++ b/packages/fmodata/tests/update.test.ts @@ -8,177 +8,123 @@ import { describe, it, expect, expectTypeOf, vi } from "vitest"; import { z } from "zod/v4"; import { - defineBaseTable, - defineTableOccurrence, - buildOccurrences, -} from "../src/index"; -import { InsertBuilder } from "../src/client/insert-builder"; -import { UpdateBuilder } from "../src/client/update-builder"; -import { ExecutableUpdateBuilder } from "../src/client/update-builder"; -import { InferSchemaType } from "../src/types"; -import type { ODataRecordMetadata } from "../src/types"; + fmTableOccurrence, + textField, + numberField, + type InferTableSchema, + eq, + and, 
+ lt, + Result, +} from "@proofkit/fmodata"; +import { InsertBuilder } from "@proofkit/fmodata/client/insert-builder"; +import { UpdateBuilder } from "@proofkit/fmodata/client/update-builder"; +import { ExecutableUpdateBuilder } from "@proofkit/fmodata/client/update-builder"; import { simpleMock } from "./utils/mock-fetch"; import { createMockClient } from "./utils/test-setup"; describe("insert and update methods", () => { const client = createMockClient(); - const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - hobby: z.string().optional(), + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey(), + name: textField().notNull(), + hobby: textField(), }, - idField: "id", - }); - - const usersBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string().nullable(), - count: z.number().nullable(), - active: z.boolean().default(true), + { + navigationPaths: ["users"], }, - idField: "id", - }); - - // Users with required fields for insert - const usersWithRequiredBase = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), - createdAt: z.string().optional(), + ); + + const users = fmTableOccurrence( + "users", + { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField(), + count: numberField(), + active: numberField() + .notNull() + .readValidator(z.coerce.boolean().default(true)) + .writeValidator(z.boolean().transform((v) => (v ? 
1 : 0))), }, - idField: "id", - required: ["username", "email"], - }); - - const _testTO = defineTableOccurrence({ - name: "test", - baseTable: defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - }, - idField: "id", - }), - }); - - // Phase 1: Define base TOs (without navigation) - const _contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - }); - - const _usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - }); - - // Phase 2: Build final TOs with navigation - const [contactsTO, usersTO, testTO] = buildOccurrences({ - occurrences: [_contactsTO, _usersTO, _testTO], - navigation: { - contacts: ["users"], - users: ["contacts", "test"], + { + navigationPaths: ["contacts", "test"], }, + ); + + // Users with required fields for insert + const usersWithRequired = fmTableOccurrence("usersWithRequired", { + id: textField().primaryKey(), + username: textField().notNull(), + email: textField().notNull(), + createdAt: textField(), }); - const usersWithRequiredTO = defineTableOccurrence({ - name: "usersWithRequired", - baseTable: usersWithRequiredBase, + const testTO = fmTableOccurrence("test", { + id: textField().primaryKey(), + name: textField().notNull(), }); - type UserFieldNames = keyof InferSchemaType; + type UserFieldNames = keyof InferTableSchema; describe("insert method", () => { it("should return InsertBuilder when called", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); - const result = db - .from("users") - .insert({ username: "test", active: true }); + const result = db.from(users).insert({ username: "test", active: true }); expect(result).toBeInstanceOf(InsertBuilder); }); it("should accept all fields as optional when no required specified", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); // @ts-expect-error - some 
fields are required, no empty object is allowed - db.from("users").insert({}); + db.from(users).insert({}); - // @ts-expect-error - a required fields is - db.from("users").insert({ username: "test" }); + // @ts-expect-error - a required fields is missing + db.from(users).insert({ username: "test" }); // Should accept all fields - db.from("users").insert({ + db.from(users).insert({ username: "test", email: "test@example.com", active: true, }); - - // Type check: all fields should be optional - expectTypeOf(db.from("users").insert) - .parameter(0) - .toMatchObjectType>>(); }); it("should require specified fields when required is set", () => { - const db = client.database("test_db", { - occurrences: [usersWithRequiredTO], - }); + const db = client.database("test_db"); // These should work - required fields are username and email - db.from("usersWithRequired").insert({ + db.from(usersWithRequired).insert({ username: "test", email: "test@example.com", }); - db.from("usersWithRequired").insert({ + db.from(usersWithRequired).insert({ username: "test", email: "test@example.com", }); // Type check: username and email should be required - expectTypeOf(db.from("usersWithRequired").insert) + expectTypeOf(db.from(usersWithRequired).insert) .parameter(0) .toHaveProperty("username"); - expectTypeOf(db.from("usersWithRequired").insert) + expectTypeOf(db.from(usersWithRequired).insert) .parameter(0) .toHaveProperty("email"); }); - it("should return InsertBuilder with correct types", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); - - const builder = db - .from("users") - .insert({ username: "test", active: true }); - - expectTypeOf(builder).toEqualTypeOf< - InsertBuilder, typeof usersTO> - >(); - }); - it("should have execute() that returns Result without ODataRecordMetadata by default", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); - const builder 
= db - .from("users") - .insert({ username: "test", active: true }); + const builder = db.from(users).insert({ username: "test", active: true }); expectTypeOf(builder.execute).returns.resolves.toMatchTypeOf<{ - data: InferSchemaType | undefined; + data: InferTableSchema | undefined; error: Error | undefined; }>(); }); @@ -186,58 +132,48 @@ describe("insert and update methods", () => { describe("update method with builder pattern", () => { it("should return UpdateBuilder when update() is called", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); - const result = db.from("users").update({ username: "newname" }); + const result = db.from(users).update({ username: "newname" }); expect(result).toBeInstanceOf(UpdateBuilder); }); it("should not have execute() on initial UpdateBuilder", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); - const updateBuilder = db.from("users").update({ username: "newname" }); + const updateBuilder = db.from(users).update({ username: "newname" }); // Type check: execute should not exist on UpdateBuilder expectTypeOf(updateBuilder).not.toHaveProperty("execute"); }); it("should return ExecutableUpdateBuilder after byId()", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const result = db - .from("users") + .from(users) .update({ username: "newname" }) .byId("user-123"); expect(result).toBeInstanceOf(ExecutableUpdateBuilder); }); it("should return ExecutableUpdateBuilder after where()", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const result = db - .from("users") + .from(users) .update({ active: false }) - .where((q) => q.filter({ active: true })); + .where((q) => q.where(eq(users.active, true))); 
expect(result).toBeInstanceOf(ExecutableUpdateBuilder); }); }); describe("update by ID", () => { it("should generate correct URL for update by ID", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const updateBuilder = db - .from("users") + .from(users) .update({ username: "newname" }) .byId("user-123"); const config = updateBuilder.getRequestConfig(); @@ -248,20 +184,17 @@ describe("insert and update methods", () => { }); it("should return updatedCount type for update by ID", async () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const updateBuilder = db - .from("users") + .from(users) .update({ username: "newname" }) .byId("user-123"); // Type check: execute should return Result<{ updatedCount: number }> - expectTypeOf(updateBuilder.execute).returns.resolves.toMatchTypeOf<{ - data: { updatedCount: number } | undefined; - error: Error | undefined; - }>(); + expectTypeOf(updateBuilder.execute).returns.resolves.toEqualTypeOf< + Result<{ updatedCount: number }> + >(); }); it("should execute update by ID and return count", async () => { @@ -271,12 +204,10 @@ describe("insert and update methods", () => { body: null, }); - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = await db - .from("users") + .from(users) .update({ username: "newname" }) .byId("user-123") .execute({ fetchHandler: mockFetch }); @@ -289,14 +220,12 @@ describe("insert and update methods", () => { describe("update by filter", () => { it("should generate correct URL for update by filter", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const updateBuilder = db - .from("users") + .from(users) .update({ active: false }) - .where((q) => q.filter({ active: true })); + 
.where((q) => q.where(eq(users.active, true))); const config = updateBuilder.getRequestConfig(); @@ -307,18 +236,12 @@ describe("insert and update methods", () => { }); it("should support complex filters with QueryBuilder", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const updateBuilder = db - .from("users") + .from(users) .update({ active: false }) - .where((q) => - q.filter({ - and: [{ active: true }, { count: { lt: 5 } }], - }), - ); + .where((q) => q.where(and(eq(users.active, true), lt(users.count, 5)))); const config = updateBuilder.getRequestConfig(); @@ -327,14 +250,12 @@ describe("insert and update methods", () => { }); it("should support QueryBuilder chaining in where callback", () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const updateBuilder = db - .from("users") + .from(users) .update({ active: false }) - .where((q) => q.filter({ active: true }).top(10)); + .where((q) => q.where(eq(users.active, true)).top(10)); const config = updateBuilder.getRequestConfig(); @@ -344,14 +265,12 @@ describe("insert and update methods", () => { }); it("should return updatedCount result type for filter-based update", async () => { - const db = client.database("test_db", { - occurrences: [contactsTO, usersTO], - }); + const db = client.database("test_db"); const updateBuilder = db - .from("users") + .from(users) .update({ active: false }) - .where((q) => q.filter({ active: true })); + .where((q) => q.where(eq(users.active, true))); // Type check: execute should return Result<{ updatedCount: number }> expectTypeOf(updateBuilder.execute).returns.resolves.toMatchTypeOf<{ @@ -367,14 +286,12 @@ describe("insert and update methods", () => { body: null, }); - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = await db - 
.from("users") + .from(users) .update({ active: false }) - .where((q) => q.filter({ active: true })) + .where((q) => q.where(eq(users.active, true))) .execute({ fetchHandler: mockFetch }); expect(result.error).toBeUndefined(); @@ -384,174 +301,145 @@ describe("insert and update methods", () => { describe("update with optional fields", () => { it("should allow all fields to be optional for updates", () => { - const db = client.database("test_db", { - occurrences: [usersWithRequiredTO], - }); + const db = client.database("test_db"); // All fields should be optional for updates (updateRequired removed) - db.from("usersWithRequired").update({ + db.from(usersWithRequired).update({ username: "test", }); - db.from("usersWithRequired").update({ + db.from(usersWithRequired).update({ email: "test@example.com", }); // Can update with empty object - db.from("usersWithRequired").update({}); + db.from(usersWithRequired).update({}); }); it("should keep all fields optional regardless of insert requirements", () => { - const usersForUpdate = defineBaseTable({ - schema: { - id: z.string(), - username: z.string(), - email: z.string(), - status: z.string(), - }, - idField: "id", - required: ["username", "email"] as const, // Required for insert, but not for update + const usersForUpdate = fmTableOccurrence("usersForUpdate", { + id: textField().primaryKey(), + username: textField().notNull(), // Required for insert, but not for update + email: textField().notNull(), // Required for insert, but not for update + status: textField(), }); - const usersForUpdateTO = defineTableOccurrence({ - name: "usersForUpdate", - baseTable: usersForUpdate, - }); - - const db = client.database("test_db", { - occurrences: [usersForUpdateTO], - }); + const db = client.database("test_db"); // All fields are optional for update, even those required for insert - db.from("usersForUpdate").update({ + db.from(usersForUpdate).update({ status: "active", }); - db.from("usersForUpdate").update({ + 
db.from(usersForUpdate).update({ username: "newname", }); - db.from("usersForUpdate").update({}); + db.from(usersForUpdate).update({}); }); }); describe("readOnly fields", () => { it("should exclude id field from insert automatically", () => { - const usersWithReadOnly = defineBaseTable({ - schema: { - id: z.string(), - createdAt: z.string(), - modifiedAt: z.string(), - username: z.string(), - email: z.string(), - }, - idField: "id", - readOnly: ["createdAt", "modifiedAt"] as const, + const usersWithReadOnly = fmTableOccurrence("usersWithReadOnly", { + id: textField().primaryKey(), + createdAt: textField().readOnly(), + modifiedAt: textField().readOnly(), + username: textField(), + email: textField(), }); - const usersWithReadOnlyTO = defineTableOccurrence({ - name: "usersWithReadOnly", - baseTable: usersWithReadOnly, + const db = client.database("test_db"); + + // id, createdAt, and modifiedAt should not be available for insert + db.from(usersWithReadOnly).insert({ + username: "john", + // email: "john@example.com", + + // @ts-expect-error - primary key should be readOnly by default + id: "123", }); - const db = client.database("test_db", { - occurrences: [usersWithReadOnlyTO], + db.from(usersWithReadOnly).insert({ + username: "john", + + // @ts-expect-error - createdAt should be readOnly + createdAt: "2025-01-01", }); - // id, createdAt, and modifiedAt should not be available for insert - db.from("usersWithReadOnly").insert({ + db.from(usersWithReadOnly).insert({ username: "john", - email: "john@example.com", + + // @ts-expect-error - createdAt should be readOnly + modifiedAt: "2025-01-01", }); // Type check: id, createdAt, modifiedAt should not be in insert data type - expectTypeOf(db.from("usersWithReadOnly").insert) + expectTypeOf(db.from(usersWithReadOnly).insert) .parameter(0) .not.toHaveProperty("id"); - expectTypeOf(db.from("usersWithReadOnly").insert) + expectTypeOf(db.from(usersWithReadOnly).insert) .parameter(0) .not.toHaveProperty("createdAt"); - 
expectTypeOf(db.from("usersWithReadOnly").insert) + expectTypeOf(db.from(usersWithReadOnly).insert) .parameter(0) .not.toHaveProperty("modifiedAt"); }); it("should exclude id field and readOnly fields from update", () => { - const usersWithReadOnly = defineBaseTable({ - schema: { - id: z.string(), - createdAt: z.string(), - modifiedAt: z.string(), - username: z.string(), - email: z.string(), - }, - idField: "id", - readOnly: ["createdAt", "modifiedAt"] as const, + const usersWithReadOnlyTO = fmTableOccurrence("usersWithReadOnly", { + id: textField().primaryKey(), + createdAt: textField().readOnly(), + modifiedAt: textField().readOnly(), + username: textField(), + email: textField(), }); - const usersWithReadOnlyTO = defineTableOccurrence({ - name: "usersWithReadOnly", - baseTable: usersWithReadOnly, - }); - - const db = client.database("test_db", { - occurrences: [usersWithReadOnlyTO], - }); + const db = client.database("test_db"); // id, createdAt, and modifiedAt should not be available for update - db.from("usersWithReadOnly").update({ + db.from(usersWithReadOnlyTO).update({ username: "newname", }); - db.from("usersWithReadOnly").update({ + db.from(usersWithReadOnlyTO).update({ email: "newemail@example.com", }); // Type check: id, createdAt, modifiedAt should not be in update data type - expectTypeOf(db.from("usersWithReadOnly").update) + expectTypeOf(db.from(usersWithReadOnlyTO).update) .parameter(0) .not.toHaveProperty("id"); - expectTypeOf(db.from("usersWithReadOnly").update) + expectTypeOf(db.from(usersWithReadOnlyTO).update) .parameter(0) .not.toHaveProperty("createdAt"); - expectTypeOf(db.from("usersWithReadOnly").update) + expectTypeOf(db.from(usersWithReadOnlyTO).update) .parameter(0) .not.toHaveProperty("modifiedAt"); }); it("should allow inserts without specifying readOnly fields", () => { - const usersWithReadOnly = defineBaseTable({ - schema: { - id: z.string(), - createdAt: z.string(), - username: z.string(), - email: z.string().nullable(), - }, - 
idField: "id", - readOnly: ["createdAt"] as const, - }); - - const usersWithReadOnlyTO = defineTableOccurrence({ - name: "usersWithReadOnly", - baseTable: usersWithReadOnly, + const usersWithReadOnlyTO = fmTableOccurrence("usersWithReadOnly", { + id: textField().primaryKey(), + createdAt: textField().readOnly(), + username: textField(), + email: textField(), // nullable by default }); - const db = client.database("test_db", { - occurrences: [usersWithReadOnlyTO], - }); + const db = client.database("test_db"); // Should work - id and createdAt are excluded automatically - db.from("usersWithReadOnly").insert({ + db.from(usersWithReadOnlyTO).insert({ username: "john", email: "john@example.com", }); // Should work - email is optional (nullable) - db.from("usersWithReadOnly").insert({ + db.from(usersWithReadOnlyTO).insert({ username: "jane", }); }); @@ -561,12 +449,10 @@ describe("insert and update methods", () => { it("should return error on failed update by ID", async () => { const mockFetch = vi.fn().mockRejectedValue(new Error("Network error")); - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = await db - .from("users") + .from(users) .update({ username: "newname" }) .byId("user-123") .execute({ fetchHandler: mockFetch as any }); @@ -579,14 +465,12 @@ describe("insert and update methods", () => { it("should return error on failed update by filter", async () => { const mockFetch = vi.fn().mockRejectedValue(new Error("Network error")); - const db = client.database("test_db", { - occurrences: [usersTO], - }); + const db = client.database("test_db"); const result = await db - .from("users") + .from(users) .update({ active: false }) - .where((q) => q.filter({ active: true })) + .where((q) => q.where(eq(users.active, true))) .execute({ fetchHandler: mockFetch as any }); expect(result.data).toBeUndefined(); diff --git a/packages/fmodata/tests/use-entity-ids-override.test.ts 
b/packages/fmodata/tests/use-entity-ids-override.test.ts index c2716df5..ceed3e46 100644 --- a/packages/fmodata/tests/use-entity-ids-override.test.ts +++ b/packages/fmodata/tests/use-entity-ids-override.test.ts @@ -10,11 +10,23 @@ import { describe, it, expect } from "vitest"; import { z } from "zod/v4"; import { FMServerConnection, - defineBaseTable, - defineTableOccurrence, -} from "../src/index"; + fmTableOccurrence, + textField, +} from "@proofkit/fmodata"; import { simpleMock } from "./utils/mock-fetch"; +// Create database with entity IDs +const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), + }, + { + entityId: "FMTID:100", + }, +); + describe("Per-request useEntityIds override", () => { it("should allow disabling entity IDs for a specific request", async () => { // Create connection with entity IDs enabled by default @@ -23,35 +35,14 @@ describe("Per-request useEntityIds override", () => { auth: { username: "test", password: "test" }, }); - // Create database with entity IDs - const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", - }, - }); - - const contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - fmtId: "FMTID:100", - }); - - const db = connection.database("TestDB", { - occurrences: [contactsTO] as const, - }); + const db = connection.database("TestDB"); // First request: use default (should have entity ID header) await db - .from("contacts") + .from(contactsTO) .list() .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -63,11 +54,11 @@ describe("Per-request useEntityIds override", () => { // Second request: explicitly disable entity IDs for this request only await db - 
.from("contacts") + .from(contactsTO) .list() .execute({ useEntityIds: false, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -79,11 +70,11 @@ describe("Per-request useEntityIds override", () => { // Third request: explicitly enable entity IDs for this request await db - .from("contacts") + .from(contactsTO) .list() .execute({ useEntityIds: true, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -107,10 +98,10 @@ describe("Per-request useEntityIds override", () => { // First request: use default (should NOT have entity ID header) await db - .from("contacts") + .from(contactsTO) .list() .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -122,11 +113,11 @@ describe("Per-request useEntityIds override", () => { // Second request: explicitly enable entity IDs for this request only await db - .from("contacts") + .from(contactsTO) .list() .execute({ useEntityIds: true, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -138,10 +129,10 @@ describe("Per-request useEntityIds override", () => { // Third request: confirm default is still disabled await db - .from("contacts") + .from(contactsTO) .list() .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -158,34 +149,25 @@ describe("Per-request useEntityIds override", () => { auth: { username: "test", password: "test" }, }); - const contactsBase = defineBaseTable({ - schema: 
{ - id: z.string(), - name: z.string(), + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", + { + entityId: "FMTID:100", }, - }); - - const contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - fmtId: "FMTID:100", - }); + ); - const db = connection.database("TestDB", { - occurrences: [contactsTO] as const, - }); + const db = connection.database("TestDB"); // Insert with default settings (entity IDs enabled) await db - .from("contacts") + .from(contactsTO) .insert({ name: "Test" }) .execute({ - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -200,11 +182,11 @@ describe("Per-request useEntityIds override", () => { // Insert with entity IDs disabled for this request await db - .from("contacts") + .from(contactsTO) .insert({ name: "Test" }) .execute({ useEntityIds: false, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -224,36 +206,27 @@ describe("Per-request useEntityIds override", () => { auth: { username: "test", password: "test" }, }); - const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", + { + entityId: "FMTID:100", }, - }); + ); - const contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - fmtId: "FMTID:100", - }); - - const db = connection.database("TestDB", { - occurrences: [contactsTO] as const, - }); + const db = 
connection.database("TestDB"); // Update with entity IDs disabled await db - .from("contacts") + .from(contactsTO) .update({ name: "Updated" }) .byId("123") .execute({ useEntityIds: false, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -269,12 +242,12 @@ describe("Per-request useEntityIds override", () => { // Update with entity IDs enabled await db - .from("contacts") + .from(contactsTO) .update({ name: "Updated" }) .byId("123") .execute({ useEntityIds: true, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -295,36 +268,27 @@ describe("Per-request useEntityIds override", () => { auth: { username: "test", password: "test" }, }); - const contactsBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), + const contactsTO = fmTableOccurrence( + "contacts", + { + id: textField().primaryKey().entityId("FMFID:1"), + name: textField().entityId("FMFID:2"), }, - idField: "id", - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", + { + entityId: "FMTID:100", }, - }); - - const contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - fmtId: "FMTID:100", - }); + ); - const db = connection.database("TestDB", { - occurrences: [contactsTO] as const, - }); + const db = connection.database("TestDB"); // Delete with entity IDs enabled await db - .from("contacts") + .from(contactsTO) .delete() .byId("123") .execute({ useEntityIds: true, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string @@ -340,12 +304,12 @@ describe("Per-request useEntityIds override", () => { // Delete with entity IDs disabled await db - .from("contacts") + .from(contactsTO) .delete() 
.byId("123") .execute({ useEntityIds: false, - fetchHandler: (input, init) => { + fetchHandler: (input: RequestInfo | URL, init?: RequestInit) => { const headers = (init as RequestInit)?.headers as Record< string, string diff --git a/packages/fmodata/tests/utils/test-setup.ts b/packages/fmodata/tests/utils/test-setup.ts index 30cfc8b0..dd39c953 100644 --- a/packages/fmodata/tests/utils/test-setup.ts +++ b/packages/fmodata/tests/utils/test-setup.ts @@ -1,29 +1,29 @@ /** * Shared Test Setup Components * - * Provides reusable base tables, table occurrences, and mock client + * Provides reusable table occurrences and mock client * for use across test files. Based on e2e.test.ts schemas. */ import { FMServerConnection, - defineBaseTable, - defineTableOccurrence, - buildOccurrences, -} from "../../src/index"; + fmTableOccurrence, + textField, + numberField, + timestampField, + dateField, + type InferTableSchema, + type FieldBuilder, +} from "@proofkit/fmodata"; import { z } from "zod/v4"; -import { InferSchemaType } from "../../src/types"; -// Base Tables matching e2e.test.ts schemas - -export const usersSimpleBase = defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - // intentionally missing fields to test validation - }, - idField: "id", -}); +// Helper function for boolean fields (FileMaker stores as 0/1) +const booleanField = (): FieldBuilder => + numberField() + // Parses the number to a boolean when reading from the database + .readValidator(z.coerce.boolean()) + // Allows the user to pass a boolean when inserting or updating, converting it back to number + .writeValidator(z.boolean().transform((val) => (val ? 
1 : 0))); export const hobbyEnum = z.enum([ "Board games", @@ -32,234 +32,180 @@ export const hobbyEnum = z.enum([ "Unknown", ]); -export const contactsBase = defineBaseTable({ - schema: { - PrimaryKey: z.string(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - hobby: hobbyEnum.nullable().catch("Unknown"), - id_user: z.string().nullable(), +// Table occurrences using new ORM patterns + +export const contacts = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey(), + CreationTimestamp: timestampField().readOnly(), + CreatedBy: textField().readOnly(), + ModificationTimestamp: timestampField().readOnly(), + ModifiedBy: textField(), + name: textField(), + hobby: textField().readValidator(hobbyEnum.nullable().catch("Unknown")), + id_user: textField(), + image: containerField(), // should not be included in the default select when set to "all" or "schema" }, - idField: "PrimaryKey", -}); - -export const usersBase = defineBaseTable({ - schema: { - id: z.uuid(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - active: z.coerce.boolean(), - fake_field: z - .string() - .catch("I only exist in the schema, not the database"), - id_customer: z.string().nullable(), + { + defaultSelect: "all", + navigationPaths: ["users", "invoices"], }, - idField: "id", -}); - -export const invoicesBase = defineBaseTable({ - schema: { - id: z.string(), - invoiceNumber: z.string(), - id_contact: z.string().nullable(), - invoiceDate: z.string().nullable(), - dueDate: z.string().nullable(), - total: z.number().nullable(), - status: z.enum(["draft", "sent", "paid", "overdue"]).nullable(), +); + +export const users = fmTableOccurrence( + "users", + { + id: 
textField().primaryKey().readValidator(z.uuid()), + CreationTimestamp: timestampField(), + CreatedBy: textField(), + ModificationTimestamp: timestampField(), + ModifiedBy: textField(), + name: textField(), + active: booleanField(), + fake_field: textField().readValidator( + z.string().catch("I only exist in the schema, not the database"), + ), + id_customer: textField(), }, - idField: "id", -}); - -export const lineItemsBase = defineBaseTable({ - schema: { - id: z.string(), - id_invoice: z.string().nullable(), - description: z.string().nullable(), - quantity: z.number().nullable(), - unitPrice: z.number().nullable(), - lineTotal: z.number().nullable(), + { + defaultSelect: "all", + navigationPaths: ["contacts"], }, - idField: "id", -}); - -export const contactsBaseWithIds = defineBaseTable({ - schema: { - PrimaryKey: z.string(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - hobby: hobbyEnum.nullable().catch("Unknown"), - id_user: z.string().nullable(), +); + +export const invoices = fmTableOccurrence( + "invoices", + { + id: textField().primaryKey(), + invoiceNumber: textField().notNull(), + id_contact: textField(), + invoiceDate: dateField(), + dueDate: dateField(), + total: numberField(), + status: textField().readValidator( + z.enum(["draft", "sent", "paid", "overdue"]).nullable(), + ), }, - idField: "PrimaryKey", - fmfIds: { - PrimaryKey: "FMFID:10", - CreationTimestamp: "FMFID:11", - CreatedBy: "FMFID:12", - ModificationTimestamp: "FMFID:13", - ModifiedBy: "FMFID:14", - name: "FMFID:15", - hobby: "FMFID:16", - id_user: "FMFID:17", + { + defaultSelect: "all", + navigationPaths: ["lineItems", "contacts"], }, -}); - -export const usersBaseWithIds = defineBaseTable({ - schema: { - id: z.uuid(), - CreationTimestamp: z.string().nullable(), - CreatedBy: z.string().nullable(), - ModificationTimestamp: 
z.string().nullable(), - ModifiedBy: z.string().nullable(), - name: z.string().nullable(), - active: z.coerce.boolean(), - fake_field: z - .string() - .catch("I only exist in the schema, not the database"), - id_customer: z.string().nullable(), +); + +export const lineItems = fmTableOccurrence( + "lineItems", + { + id: textField().primaryKey(), + id_invoice: textField(), + description: textField(), + quantity: numberField(), + unitPrice: numberField(), + lineTotal: numberField(), }, - idField: "id", - fmfIds: { - id: "FMFID:1", - CreationTimestamp: "FMFID:2", - CreatedBy: "FMFID:3", - ModificationTimestamp: "FMFID:4", - ModifiedBy: "FMFID:5", - name: "FMFID:6", - active: "FMFID:7", - fake_field: "FMFID:8", - id_customer: "FMFID:9", + { + defaultSelect: "all", + navigationPaths: ["invoices"], }, -}); - -// Phase 1: Define base TableOccurrences (without navigation) -const _contactsTO = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBase, - defaultSelect: "all", -}); - -const _usersTO = defineTableOccurrence({ - name: "users", - baseTable: usersBase, - defaultSelect: "all", -}); - -const _invoicesTO = defineTableOccurrence({ - name: "invoices", - baseTable: invoicesBase, - defaultSelect: "all", -}); - -const _lineItemsTO = defineTableOccurrence({ - name: "lineItems", - baseTable: lineItemsBase, - defaultSelect: "all", -}); - -// Phase 2: Build final TOs with navigation -export const occurrences = buildOccurrences({ - occurrences: [_contactsTO, _usersTO, _invoicesTO, _lineItemsTO], - navigation: { - contacts: ["users", "invoices"], - users: ["contacts"], - invoices: ["lineItems", "contacts"], - lineItems: ["invoices"], +); + +// Table occurrences with entity IDs +export const contactsTOWithIds = fmTableOccurrence( + "contacts", + { + PrimaryKey: textField().primaryKey().entityId("FMFID:10"), + CreationTimestamp: timestampField().entityId("FMFID:11"), + CreatedBy: textField().entityId("FMFID:12"), + ModificationTimestamp: 
timestampField().entityId("FMFID:13"), + ModifiedBy: textField().entityId("FMFID:14"), + name: textField().entityId("FMFID:15"), + hobby: textField() + .entityId("FMFID:16") + .readValidator(hobbyEnum.nullable().catch("Unknown")), + id_user: textField().entityId("FMFID:17"), }, -}); + { + entityId: "FMTID:200", + useEntityIds: true, + defaultSelect: "all", + navigationPaths: ["users"], + }, +); + +export const usersTOWithIds = fmTableOccurrence( + "users", + { + id: textField().primaryKey().entityId("FMFID:1").readValidator(z.uuid()), + CreationTimestamp: timestampField().entityId("FMFID:2"), + CreatedBy: textField().entityId("FMFID:3"), + ModificationTimestamp: timestampField().entityId("FMFID:4"), + ModifiedBy: textField().entityId("FMFID:5"), + name: textField().entityId("FMFID:6"), + active: booleanField().entityId("FMFID:7"), + fake_field: textField() + .entityId("FMFID:8") + .readValidator( + z.string().catch("I only exist in the schema, not the database"), + ), + id_customer: textField().entityId("FMFID:9"), + }, + { + entityId: "FMTID:1065093", + useEntityIds: true, + defaultSelect: "all", + navigationPaths: ["contacts"], + }, +); -// Phase 1: Define base TOs with entity IDs (without navigation) -const _contactsTOWithIds = defineTableOccurrence({ - name: "contacts", - baseTable: contactsBaseWithIds, - fmtId: "FMTID:200", - defaultSelect: "all", +export const arbitraryTable = fmTableOccurrence("arbitrary_table", { + id: textField().primaryKey(), + name: textField().notNull(), }); -const _usersTOWithIds = defineTableOccurrence({ - name: "users", - baseTable: usersBaseWithIds, - fmtId: "FMTID:1065093", - defaultSelect: "all", +// Simple users table occurrence (same name as usersTO to test validation) +export const usersSimpleTO = fmTableOccurrence("users", { + id: textField().primaryKey().notNull(), + name: textField().notNull(), + // intentionally missing fields to test validation }); -// type check only, don't run this -() => { - buildOccurrences({ - 
occurrences: [_contactsTO, _usersTO], - navigation: { - // @ts-expect-error - navigation to self is not allowed - contacts: ["contacts"], - // @ts-expect-error - navigation to nonexistent table is not allowed - users: ["other"], - }, - }); +// Types - extract from table occurrences for backward compatibility +export type ContactSchema = InferTableSchema; +export type UserSchema = InferTableSchema; +export type InvoiceSchema = InferTableSchema; +export type LineItemSchema = InferTableSchema; - // Full navigation - buildOccurrences({ - occurrences: [_contactsTOWithIds, _usersTOWithIds], - navigation: { - contacts: ["users"], - users: ["contacts"], - }, - }); +// Backward-compatible base table exports for tests that need .schema property +// These extract the schema from the new FMTable instances +import { containerField, FMTable } from "@proofkit/fmodata"; - // Partial navigation - buildOccurrences({ - occurrences: [_contactsTOWithIds, _usersTOWithIds], - navigation: { - contacts: ["users"], - }, - }); +function getSchemaFromTable>(table: T) { + return (table as any)[FMTable.Symbol.Schema]; +} - // No navigation - buildOccurrences({ - occurrences: [_contactsTOWithIds, _usersTOWithIds], - }); -}; +// export const contactsBase = { +// schema: getSchemaFromTable(contactsTO), +// } as const; -// Phase 2: Build final TOs with navigation -export const occurrencesWithIds = buildOccurrences({ - occurrences: [_contactsTOWithIds, _usersTOWithIds], - navigation: { - contacts: ["users"], - users: ["contacts"], - }, -}); +// export const usersBase = { +// schema: getSchemaFromTable(usersTO), +// } as const; -export const usersSimpleTO = defineTableOccurrence({ - name: "users", // same name as usersTO to test validation - baseTable: usersSimpleBase, -}); +// export const invoicesBase = { +// schema: getSchemaFromTable(invoicesTO), +// } as const; -defineBaseTable({ - schema: { - id: z.string(), - name: z.string(), - // extra: z.string(), // try omitting this field - }, - idField: 
"id", - required: ["extra"], - fmfIds: { - id: "FMFID:1", - name: "FMFID:2", - extra: "FMFID:3", // no TS error - }, -}); +// export const lineItemsBase = { +// schema: getSchemaFromTable(lineItemsTO), +// } as const; + +// export const contactsBaseWithIds = { +// schema: getSchemaFromTable(contactsTOWithIds), +// } as const; -// Types -export type ContactSchema = InferSchemaType; -export type UserSchema = InferSchemaType; -export type InvoiceSchema = InferSchemaType; -export type LineItemSchema = InferSchemaType; +// export const usersBaseWithIds = { +// schema: getSchemaFromTable(usersTOWithIds), +// } as const; // Mock client factory - ensures unit tests never hit real databases export function createMockClient(): FMServerConnection { diff --git a/packages/fmodata/tests/validation.test.ts b/packages/fmodata/tests/validation.test.ts index 8e724c5d..81ba172b 100644 --- a/packages/fmodata/tests/validation.test.ts +++ b/packages/fmodata/tests/validation.test.ts @@ -19,25 +19,23 @@ import { createMockClient, hobbyEnum, usersSimpleTO, - occurrences, + contacts, + users, } from "./utils/test-setup"; import { z } from "zod/v4"; +import { fmTableOccurrence, textField } from "@proofkit/fmodata"; describe("Validation Tests", () => { const client = createMockClient(); - const db = client.database("fmdapi_test.fmp12", { - occurrences: occurrences, - }); - const simpleDb = client.database("fmdapi_test.fmp12", { - occurrences: [usersSimpleTO], - }); + const db = client.database("fmdapi_test.fmp12"); + const simpleDb = client.database("fmdapi_test.fmp12"); describe("validateRecord", () => { it("should validate a single record", async () => { const result = await db - .from("contacts") + .from(contacts) .list() - .select("hobby") + .select({ hobby: contacts.hobby }) .execute({ fetchHandler: simpleMock({ status: 200, @@ -63,9 +61,11 @@ describe("Validation Tests", () => { it("should validate records within an expand expression", async () => { const result = await db - 
.from("contacts") + .from(contacts) .list() - .expand("users", (b) => b.select("name", "fake_field")) + .expand(users, (b: any) => + b.select({ name: users.name, fake_field: users.fake_field }), + ) .execute({ fetchHandler: simpleMock({ status: 200, @@ -120,14 +120,23 @@ describe("Validation Tests", () => { }); }); it("should automatically select only fields in the schema", async () => { - const query = simpleDb.from("users").list(); + const simpleUsers = fmTableOccurrence("users", { + id: textField().primaryKey().notNull(), + name: textField().notNull(), + }); + const query = simpleDb.from(simpleUsers).list(); + + const queryString = query.getQueryString(); - expect(query.getQueryString()).toBe('/users?$select="id",name&$top=1000'); + expect(queryString).toContain(`$select=`); + expect(queryString).toContain(`name`); + expect(queryString).toContain(`"id"`); // must quote the id field + expect(queryString).not.toContain(`$expand`); }); it("should skip validation if requested", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .execute({ skipValidation: true, @@ -163,7 +172,7 @@ describe("Validation Tests", () => { it("should return odata annotations if requested, even if skipValidation is true", async () => { const result = await db - .from("contacts") + .from(contacts) .list() .execute({ skipValidation: true, diff --git a/packages/fmodata/vitest.config.ts b/packages/fmodata/vitest.config.ts index a93e7a2f..f01ce6aa 100644 --- a/packages/fmodata/vitest.config.ts +++ b/packages/fmodata/vitest.config.ts @@ -1,6 +1,14 @@ import { defineConfig } from "vitest/config"; +import { resolve } from "path"; export default defineConfig({ + resolve: { + alias: { + "@proofkit/fmodata": process.env.TEST_BUILD + ? 
resolve(__dirname, "./dist/esm") + : resolve(__dirname, "./src"), + }, + }, test: { testTimeout: 15000, // Exclude E2E tests from default test runs @@ -11,7 +19,9 @@ export default defineConfig({ typecheck: { enabled: true, include: ["src/**/*.ts", "tests/**/*.test.ts", "tests/**/*.test-d.ts"], - tsconfig: "./tests/tsconfig.json", + tsconfig: process.env.TEST_BUILD + ? "./tests/tsconfig.build.json" + : "./tests/tsconfig.json", }, }, }); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 9f2dee6d..dbf4bbc7 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -629,6 +629,9 @@ importers: '@types/node': specifier: ^22.17.1 version: 22.17.1 + fast-xml-parser: + specifier: ^5.3.2 + version: 5.3.2 prettier: specifier: ^3.5.3 version: 3.5.3 @@ -5467,6 +5470,10 @@ packages: fast-levenshtein@2.0.6: resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + fast-xml-parser@5.3.2: + resolution: {integrity: sha512-n8v8b6p4Z1sMgqRmqLJm3awW4NX7NkaKPfb3uJIBTSH7Pdvufi3PQ3/lJLQrvxcMYl7JI2jnDO90siPEpD8JBA==} + hasBin: true + fastq@1.19.1: resolution: {integrity: sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==} @@ -7309,9 +7316,6 @@ packages: pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} - pkg-types@2.2.0: - resolution: {integrity: sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ==} - pkg-types@2.3.0: resolution: {integrity: sha512-SIqCzDRg0s9npO5XQ3tNZioRY1uK06lA41ynBC1YmFTmnY6FjUjVt6s4LoADmwoig1qqD0oK8h1p/8mlMx8Oig==} @@ -8071,6 +8075,9 @@ packages: strip-literal@3.0.0: resolution: {integrity: sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==} + strnum@2.1.1: + resolution: {integrity: sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw==} + 
strtok3@9.1.1: resolution: {integrity: sha512-FhwotcEqjr241ZbjFzjlIYg6c5/L/s4yBGWSMvJ9UoExiSqL+FnFA/CaeZx17WGaZMS/4SOZp8wH18jSS4R4lw==} engines: {node: '>=16'} @@ -12574,7 +12581,7 @@ snapshots: ohash: 2.0.11 pathe: 2.0.3 perfect-debounce: 1.0.0 - pkg-types: 2.2.0 + pkg-types: 2.3.0 rc9: 2.1.2 optionalDependencies: magicast: 0.3.5 @@ -13872,6 +13879,10 @@ snapshots: fast-levenshtein@2.0.6: {} + fast-xml-parser@5.3.2: + dependencies: + strnum: 2.1.1 + fastq@1.19.1: dependencies: reusify: 1.1.0 @@ -15791,7 +15802,7 @@ snapshots: citty: 0.1.6 consola: 3.4.2 pathe: 2.0.3 - pkg-types: 2.2.0 + pkg-types: 2.3.0 tinyexec: 0.3.2 oauth4webapi@2.17.0: {} @@ -16099,12 +16110,6 @@ snapshots: mlly: 1.7.4 pathe: 2.0.3 - pkg-types@2.2.0: - dependencies: - confbox: 0.2.2 - exsolve: 1.0.7 - pathe: 2.0.3 - pkg-types@2.3.0: dependencies: confbox: 0.2.2 @@ -17047,6 +17052,8 @@ snapshots: dependencies: js-tokens: 9.0.1 + strnum@2.1.1: {} + strtok3@9.1.1: dependencies: '@tokenizer/token': 0.3.0