From e3a35c280303ba6043afbd728355136ed2196bce Mon Sep 17 00:00:00 2001 From: egor Date: Thu, 15 Jan 2026 19:58:32 +0700 Subject: [PATCH] add convex pgbench like test (tpcb-like) --- examples/convex/pgbenchy/k6/.gitignore | 1 + examples/convex/pgbenchy/k6/Makefile | 58 +++ examples/convex/pgbenchy/k6/common.js | 99 ++++ examples/convex/pgbenchy/k6/summary.js | 79 ++++ examples/convex/pgbenchy/k6/tpcb-convex.js | 355 +++++++++++++++ examples/convex/pgbenchy/main.tf | 42 ++ .../pgbenchy/modules/script/entrypoint.sh.tpl | 35 ++ .../convex/pgbenchy/modules/script/main.tf | 164 +++++++ .../pgbenchy/modules/script/variables.tf | 139 ++++++ .../convex/pgbenchy/modules/setup/main.tf | 34 ++ .../pgbenchy/modules/setup/variables.tf | 15 + examples/convex/pgbenchy/project/.gitignore | 6 + .../project/convex/_generated/api.d.ts | 49 ++ .../pgbenchy/project/convex/_generated/api.js | 23 + .../project/convex/_generated/dataModel.d.ts | 60 +++ .../project/convex/_generated/server.d.ts | 143 ++++++ .../project/convex/_generated/server.js | 93 ++++ .../pgbenchy/project/convex/benchmark.ts | 424 ++++++++++++++++++ .../convex/pgbenchy/project/convex/schema.ts | 54 +++ examples/convex/pgbenchy/project/package.json | 29 ++ .../convex/pgbenchy/project/scripts/clear.js | 49 ++ .../convex/pgbenchy/project/scripts/run.sh | 55 +++ .../convex/pgbenchy/project/scripts/seed.js | 57 +++ examples/convex/pgbenchy/variables.tf | 158 +++++++ 24 files changed, 2221 insertions(+) create mode 100644 examples/convex/pgbenchy/k6/.gitignore create mode 100644 examples/convex/pgbenchy/k6/Makefile create mode 100644 examples/convex/pgbenchy/k6/common.js create mode 100644 examples/convex/pgbenchy/k6/summary.js create mode 100644 examples/convex/pgbenchy/k6/tpcb-convex.js create mode 100644 examples/convex/pgbenchy/main.tf create mode 100644 examples/convex/pgbenchy/modules/script/entrypoint.sh.tpl create mode 100644 examples/convex/pgbenchy/modules/script/main.tf create mode 100644 
examples/convex/pgbenchy/modules/script/variables.tf create mode 100644 examples/convex/pgbenchy/modules/setup/main.tf create mode 100644 examples/convex/pgbenchy/modules/setup/variables.tf create mode 100644 examples/convex/pgbenchy/project/.gitignore create mode 100644 examples/convex/pgbenchy/project/convex/_generated/api.d.ts create mode 100644 examples/convex/pgbenchy/project/convex/_generated/api.js create mode 100644 examples/convex/pgbenchy/project/convex/_generated/dataModel.d.ts create mode 100644 examples/convex/pgbenchy/project/convex/_generated/server.d.ts create mode 100644 examples/convex/pgbenchy/project/convex/_generated/server.js create mode 100644 examples/convex/pgbenchy/project/convex/benchmark.ts create mode 100644 examples/convex/pgbenchy/project/convex/schema.ts create mode 100644 examples/convex/pgbenchy/project/package.json create mode 100755 examples/convex/pgbenchy/project/scripts/clear.js create mode 100755 examples/convex/pgbenchy/project/scripts/run.sh create mode 100755 examples/convex/pgbenchy/project/scripts/seed.js create mode 100644 examples/convex/pgbenchy/variables.tf diff --git a/examples/convex/pgbenchy/k6/.gitignore b/examples/convex/pgbenchy/k6/.gitignore new file mode 100644 index 0000000..13bfb2a --- /dev/null +++ b/examples/convex/pgbenchy/k6/.gitignore @@ -0,0 +1 @@ +summary.json diff --git a/examples/convex/pgbenchy/k6/Makefile b/examples/convex/pgbenchy/k6/Makefile new file mode 100644 index 0000000..37dde38 --- /dev/null +++ b/examples/convex/pgbenchy/k6/Makefile @@ -0,0 +1,58 @@ +.PHONY: tpcb tpcb-info + +MAKEFLAGS += -j2 + +# Existing configuration +rate ?= 2 +duration ?= 60 +rand = $(shell bash -c 'echo $$RANDOM') +testrun ?= "random-run-$(rand)" + +# TPC-B configuration +SCALE_FACTOR ?= 10 +CONNS ?= 10 +REQUESTS ?= 10 +RAMPING_DURATION ?= 10 +CONSECUTIVE_DURATION ?= 20 +RAMPS_COUNT ?= 1 + +tpcb-info: + @echo "=========================================" + @echo "TPC-B Benchmark Runner" + @echo 
"=========================================" + @echo "Configuration:" + @echo " CONVEX_URL: $(CONVEX_URL)" + @echo " SCALE_FACTOR: $(SCALE_FACTOR)" + @echo " CONNS: $(CONNS)" + @echo " REQUESTS: $(REQUESTS)" + @echo " RAMPING_DURATION: $(RAMPING_DURATION)s" + @echo " CONSECUTIVE_DURATION: $(CONSECUTIVE_DURATION)s" + @echo " RAMPS_COUNT: $(RAMPS_COUNT)" + @echo " TEST_RUN: $(testrun)" + @echo "=========================================" + @echo "" + +tpcb: tpcb-info + @if [ -z "$(CONVEX_URL)" ]; then \ + echo "Error: CONVEX_URL environment variable is not set"; \ + echo "Please set it to your Convex deployment URL"; \ + exit 1; \ + fi + @./k6 run \ + --out json=results_$(testrun).json \ + -e CONVEX_URL="$(CONVEX_URL)" \ + -e SCALE_FACTOR="$(SCALE_FACTOR)" \ + -e CONNS="$(CONNS)" \ + -e REQUESTS="$(REQUESTS)" \ + -e RAMPING_DURATION="$(RAMPING_DURATION)" \ + -e CONSECUTIVE_DURATION="$(CONSECUTIVE_DURATION)" \ + -e RAMPS_COUNT="$(RAMPS_COUNT)" \ + -e TEST_RUN="$(testrun)" \ + -o 'prometheus=namespace=k6' \ + tpcb-convex.js + @echo "" + @echo "=========================================" + @echo "Benchmark complete!" 
+ @echo "Results saved to: results_$(testrun).json" + @echo "Summary saved to: summary.json" + @echo "=========================================" diff --git a/examples/convex/pgbenchy/k6/common.js b/examples/convex/pgbenchy/k6/common.js new file mode 100644 index 0000000..5fa3128 --- /dev/null +++ b/examples/convex/pgbenchy/k6/common.js @@ -0,0 +1,99 @@ +// common.js - Shared configuration for k6 TPC-B benchmarks + +/** + * Creates a ramping-vus scenario configuration + * + * @param {number} rampingDuration - Duration of each ramp stage in seconds + * @param {number} consecutiveDuration - Duration of the steady state in seconds + * @param {number} ramps - Number of ramp stages + * @param {number} conns - Target number of VUs (connections) + * @returns {object} k6 scenario configuration + */ +export function scenario(rampingDuration, consecutiveDuration, ramps, conns) { + const stages = [] + + // Build ramping stages + for (let i = 1; i <= ramps; i++) { + const targetVUs = Math.ceil((conns / ramps) * i) + + // Ramp up stage + stages.push({ + duration: `${rampingDuration}s`, + target: targetVUs, + }) + + // Steady state stage + stages.push({ + duration: `${consecutiveDuration}s`, + target: targetVUs, + }) + } + + return { + executor: 'ramping-vus', + startVUs: 0, + stages: stages, + gracefulRampDown: '30s', + } +} + +/** + * Alternative: constant arrival rate scenario + * Use this for strict TPS control + */ +export function constantRateScenario(targetTPS, duration, maxVUs) { + return { + executor: 'constant-arrival-rate', + rate: targetTPS, + timeUnit: '1s', + duration: `${duration}s`, + preAllocatedVUs: Math.ceil(maxVUs / 2), + maxVUs: maxVUs, + } +} + +/** + * Trend stats to include in summary + */ +export const trends = [ + 'avg', + 'min', + 'med', + 'max', + 'p(90)', + 'p(95)', + 'p(99)', + 'count', +] + +/** + * Default thresholds for TPC-B style workloads + */ +export const defaultThresholds = { + failed_requests: ['rate<0.1'], // Less than 10% failure rate + 
tx_trend: ['p(95)<1000'], // 95th percentile under 1 second + http_req_duration: ['p(95)<1000'], // HTTP latency under 1 second +} + +/** + * Strict thresholds for production-grade testing + */ +export const strictThresholds = { + failed_requests: ['rate<0.01'], // Less than 1% failure rate + tx_trend: ['p(95)<200'], // 95th percentile under 200ms + http_req_duration: ['p(95)<200'], +} + +/** + * Return a random integer between the minimum (inclusive) + * and maximum (exclusive) values + * @param {number} min - The minimum value to return. + * @param {number} max - The maximum value you want to return. + * @return {number} The random number between the min and max. + */ +export function getRandomInt(min, max) { + min = Math.ceil(min) + max = Math.floor(max) + // The maximum is exclusive and the minimum is inclusive + return Math.floor(Math.random() * (max - min) + min) +} \ No newline at end of file diff --git a/examples/convex/pgbenchy/k6/summary.js b/examples/convex/pgbenchy/k6/summary.js new file mode 100644 index 0000000..5f04bef --- /dev/null +++ b/examples/convex/pgbenchy/k6/summary.js @@ -0,0 +1,79 @@ +import http from 'k6/http' +import { textSummary } from 'https://jslib.k6.io/k6-summary/0.0.1/index.js' + +/* Setting up the environment variables for the test run. */ +const testrun = __ENV.TEST_RUN +const origin = __ENV.TEST_ORIGIN +const benchmark = __ENV.BENCHMARK_ID +const run = __ENV.RUN_ID +const token = __ENV.SUPABENCH_TOKEN +const supabench_uri = __ENV.SUPABENCH_URI + ? __ENV.SUPABENCH_URI + : 'http://localhost:8090' + +/** + * Handle summary implementation that additionally sends the data to the reports server. + */ +export function handleSummary(data) { + console.log('Preparing the end-of-test summary...') + const started = Date.now() + + // Send the results to remote server + if (!run) { + const report = { + output: textSummary(data, { indent: ' ', enableColors: false }), + raw: data, + benchmark_id: benchmark, + name: testrun ? 
testrun : null, + status: 'success', + origin: origin, + started_at: `${started - 60 * 1000}`, + ended_at: `${ + started + parseInt(data.state.testRunDurationMs) + 60 * 1000 + }`, + } + + const resp = http.post( + `${supabench_uri}/api/collections/runs/records`, + JSON.stringify(report), + { + headers: { + 'Content-Type': 'application/json', + Authorization: `Admin ${token}`, + }, + } + ) + if (resp.status != 200) { + console.error('Could not send summary, got status ' + resp.status) + } + } else { + const report = { + output: textSummary(data, { indent: ' ', enableColors: false }), + raw: data, + status: 'success', + started_at: `${started - 120 * 1000}`, + ended_at: `${ + started + parseInt(data.state.testRunDurationMs) + 15 * 1000 + }`, + } + + const resp = http.patch( + `${supabench_uri}/api/collections/runs/records/${run}`, + JSON.stringify(report), + { + headers: { + 'Content-Type': 'application/json', + Authorization: `Admin ${token}`, + }, + } + ) + if (resp.status != 200) { + console.error('Could not send summary, got status ' + resp.status) + } + } + + return { + stdout: textSummary(data, { indent: ' ', enableColors: true }), // Show the text summary to stdout... + 'summary.json': JSON.stringify(data), // and a JSON with all the details... + } +} diff --git a/examples/convex/pgbenchy/k6/tpcb-convex.js b/examples/convex/pgbenchy/k6/tpcb-convex.js new file mode 100644 index 0000000..dd90771 --- /dev/null +++ b/examples/convex/pgbenchy/k6/tpcb-convex.js @@ -0,0 +1,355 @@ +import { check, sleep, group } from 'k6' +import { vu, scenario } from 'k6/execution' +import { Rate, Trend, Counter } from 'k6/metrics' +import http from 'k6/http' + +import { scenario as sc, trends } from './common.js' +export { handleSummary } from './summary.js' + +// ============================================================================= +// CONFIGURATION +// ============================================================================= + +const convexUrl = __ENV.CONVEX_URL + ? 
__ENV.CONVEX_URL + : 'https://your-deployment.convex.cloud' + +const conns = __ENV.CONNS ? parseInt(__ENV.CONNS) : 10 +const requests = __ENV.REQUESTS ? parseInt(__ENV.REQUESTS) : 10 +const rampingDuration = __ENV.RAMPING_DURATION + ? parseInt(__ENV.RAMPING_DURATION) + : 1 +const consecutiveDuration = __ENV.CONSECUTIVE_DURATION + ? parseInt(__ENV.CONSECUTIVE_DURATION) + : 600 +const ramps = __ENV.RAMPS_COUNT ? parseInt(__ENV.RAMPS_COUNT) : 1 +const testRun = __ENV.TEST_RUN ? __ENV.TEST_RUN : 'default' +const scale = __ENV.SCALE_FACTOR ? parseInt(__ENV.SCALE_FACTOR) : 10 + +// ============================================================================= +// METRICS +// ============================================================================= + +const myFailRate = new Rate('failed_requests') +const counterTX = new Counter('tx') +const counterFailed = new Counter('failed') +const counterRetries = new Counter('retries') +const txTrend = new Trend('tx_trend', true) + +const to = { + failed_requests: ['rate<0.1'], + tx_trend: ['p(95)<1000'], +} + +// ============================================================================= +// K6 OPTIONS +// ============================================================================= + +export const options = { + setupTimeout: 40000000, + teardownTimeout: 40000000, + vus: 1, + thresholds: to, + summaryTrendStats: trends, + scenarios: { + convex_tpcb: sc( + rampingDuration, + consecutiveDuration, + ramps, + conns + ), + }, +} + +// ============================================================================= +// CONVEX API HELPERS +// ============================================================================= + +function convexMutation(path, args) { + const payload = JSON.stringify({ + path, + args, + format: 'json', + }) + + const params = { + headers: { + 'Content-Type': 'application/json', + }, + } + + return http.post(`${convexUrl}/api/mutation`, payload, params) +} + +function convexQuery(path, args) { + const payload = 
JSON.stringify({ + path, + args, + format: 'json', + }) + + const params = { + headers: { + 'Content-Type': 'application/json', + }, + } + + return http.post(`${convexUrl}/api/query`, payload, params) +} + +function checkConvexResponse(res, operation) { + if (res.status !== 200) { + console.log(`${operation} HTTP error: ${res.status}`) + return false + } + + try { + const body = res.json() + if (body.status === 'error') { + console.log(`${operation} Convex error: ${body.errorMessage}`) + return false + } + return true + } catch (e) { + console.log(`${operation} JSON parse error: ${e.message}`) + return false + } +} + +// ============================================================================= +// SETUP - Seed database +// ============================================================================= + +export function setup() { + console.log(`Setting up TPC-B benchmark with scale factor ${scale}`) + console.log(`Convex URL: ${convexUrl}`) + + // 1. Seed branches + console.log(`Seeding ${scale} branches...`) + let res = convexMutation('benchmark:seedBranches', { scale }) + if (!checkConvexResponse(res, 'seedBranches')) { + throw new Error('Failed to seed branches') + } + console.log(`Branches: ${res.json().value.seeded} seeded`) + + // 2. Seed tellers (10 per branch) + console.log(`Seeding ${scale * 10} tellers...`) + res = convexMutation('benchmark:seedTellers', { scale }) + if (!checkConvexResponse(res, 'seedTellers')) { + throw new Error('Failed to seed tellers') + } + console.log(`Tellers: ${res.json().value.seeded} seeded`) + + // 3. 
Seed accounts in batches (100,000 per scale factor) + const totalAccounts = 100000 * scale + const batchSize = 5000 // Adjust based on Convex timeout limits + console.log(`Seeding ${totalAccounts} accounts in batches of ${batchSize}...`) + + for (let startAid = 1; startAid <= totalAccounts; startAid += batchSize) { + const count = Math.min(batchSize, totalAccounts - startAid + 1) + res = convexMutation('benchmark:seedAccountBatch', { + startAid, + count, + scale, + }) + + if (!checkConvexResponse(res, `seedAccountBatch(${startAid})`)) { + throw new Error(`Failed to seed accounts batch starting at ${startAid}`) + } + + const progress = Math.min(100, Math.round((startAid / totalAccounts) * 100)) + if (startAid % (batchSize * 10) === 1 || startAid + batchSize > totalAccounts) { + console.log(`Accounts progress: ${progress}% (${startAid + count - 1}/${totalAccounts})`) + } + } + + // 4. Seed some initial history (optional, for more realistic workload) + const initialHistoryCount = 10000 + console.log(`Seeding ${initialHistoryCount} history records...`) + res = convexMutation('benchmark:seedHistory', { count: initialHistoryCount }) + if (!checkConvexResponse(res, 'seedHistory')) { + console.log('Warning: Failed to seed history (non-fatal)') + } else { + console.log(`History: ${res.json().value.seeded} seeded`) + } + + return { scale, convexUrl } +} + +// ============================================================================= +// MAIN TEST - TPC-B Transaction Loop +// ============================================================================= + +export default () => { + const name = vu.idInTest + + while (scenario.progress < 1) { + const start = new Date() + + for (let i = 1; i <= requests; i++) { + // Generate random transaction parameters (matching pgbench) + const tid = Math.floor(Math.random() * (10 * scale)) + 1 // Teller ID: 1 to 10*scale + const bid = Math.floor(Math.random() * scale) + 1 // Branch ID: 1 to scale + const aid = Math.floor(Math.random() * 
(100000 * scale)) + 1 // Account ID: 1 to 100000*scale + const delta = Math.floor(Math.random() * 1001) - 500 // Delta: -500 to 500 + + const exStart = new Date() + + // Retry logic for OCC failures + const maxRetries = 5 + let attempt = 0 + let success = false + + while (attempt < maxRetries && !success) { + attempt++ + + try { + // Execute TPC-B transaction (atomic in Convex - no BEGIN/COMMIT needed) + const res = convexMutation('benchmark:tpcbTransaction', { + aid, + tid, + bid, + delta, + }) + + if (res.status === 200) { + const body = res.json() + if (body.status === 'success') { + myFailRate.add(false) + success = true + } else { + // Check if it's an OCC failure that should be retried + if (body.errorData?.code === 'OptimisticConcurrencyControlFailure' && attempt < maxRetries) { + // Track retry but don't fail yet + counterRetries.add(1) + // Retry with exponential backoff + sleep(Math.min(0.01 * Math.pow(2, attempt - 1), 0.1)) + continue + } else { + myFailRate.add(true) + counterFailed.add(1) + console.log(`Transaction failed: ${body.errorMessage}`) + success = true // Don't retry non-OCC errors + } + } + } else if (res.status === 503) { + // 503 might be OCC failure, check response body + const body = res.json() + if (body.code === 'OptimisticConcurrencyControlFailure' && attempt < maxRetries) { + // Track retry but don't fail yet + counterRetries.add(1) + // Retry with exponential backoff + sleep(Math.min(0.01 * Math.pow(2, attempt - 1), 0.1)) + continue + } else { + myFailRate.add(true) + counterFailed.add(1) + console.log(`Transaction HTTP error: ${res.status}, ${JSON.stringify(body)}`) + success = true + } + } else { + myFailRate.add(true) + counterFailed.add(1) + const body = res.json() + console.log(`Transaction HTTP error: ${res.status}, ${JSON.stringify(body)}`) + success = true + } + } catch (e) { + myFailRate.add(true) + counterFailed.add(1) + console.log(`Transaction exception: ${e.message}`) + success = true + } + } + + // If we exhausted 
retries, mark as failed + if (!success) { + myFailRate.add(true) + counterFailed.add(1) + console.log(`Transaction failed after ${maxRetries} retries (OCC)`) + } + + const exFinish = new Date() + counterTX.add(1) + txTrend.add(exFinish - exStart) + + const finish = new Date() + if (finish - start > 1000) { + break + } + } + + const finish = new Date() + } +} + +// ============================================================================= +// TEARDOWN - Clean up database +// ============================================================================= + +export function teardown(data) { + console.log('Tearing down TPC-B benchmark data...') + + const batchSize = 4000 // Stay under Convex document limits + + // Clear history first (has references to other tables) + console.log('Clearing history...') + let cleared = 0 + let res + do { + res = convexMutation('benchmark:clearTableBatch', { + table: 'history', + limit: batchSize, + }) + if (checkConvexResponse(res, 'clearHistory')) { + cleared += res.json().value.deleted + } + } while (res.json()?.value?.deleted === batchSize) + console.log(` History cleared: ${cleared} records`) + + // Clear accounts + console.log('Clearing accounts...') + cleared = 0 + do { + res = convexMutation('benchmark:clearTableBatch', { + table: 'accounts', + limit: batchSize, + }) + if (checkConvexResponse(res, 'clearAccounts')) { + cleared += res.json().value.deleted + } + } while (res.json()?.value?.deleted === batchSize) + console.log(` Accounts cleared: ${cleared} records`) + + // Clear tellers + console.log('Clearing tellers...') + res = convexMutation('benchmark:clearTableBatch', { + table: 'tellers', + limit: batchSize, + }) + if (checkConvexResponse(res, 'clearTellers')) { + console.log(` Tellers cleared: ${res.json().value.deleted} records`) + } + + // Clear branches + console.log('Clearing branches...') + res = convexMutation('benchmark:clearTableBatch', { + table: 'branches', + limit: batchSize, + }) + if (checkConvexResponse(res, 
'clearBranches')) { + console.log(` Branches cleared: ${res.json().value.deleted} records`) + } + + // Clear counters + console.log('Clearing counters...') + res = convexMutation('benchmark:clearTableBatch', { + table: 'counters', + limit: batchSize, + }) + if (checkConvexResponse(res, 'clearCounters')) { + console.log(` Counters cleared: ${res.json().value.deleted} records`) + } + + console.log('Teardown complete') +} \ No newline at end of file diff --git a/examples/convex/pgbenchy/main.tf b/examples/convex/pgbenchy/main.tf new file mode 100644 index 0000000..edf1033 --- /dev/null +++ b/examples/convex/pgbenchy/main.tf @@ -0,0 +1,42 @@ +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "4.0.0" + } + } +} + +provider "aws" { + region = "ap-southeast-1" +} + +module "script" { + source = "./modules/script" + + ami_id = var.ami_id + instances_count = var.instances_count + instance_type = var.instance_type + security_group_id = var.security_group_id + subnet_id = var.subnet_id + sut_name = var.sut_name + key_name = var.key_name + private_key_location = var.private_key_location + + testrun_name = var.testrun_name + testrun_id = var.testrun_id + test_origin = var.test_origin + benchmark_id = var.benchmark_id + supabench_token = var.supabench_token + supabench_uri = var.supabench_uri + + rate = var.rate + duration = var.duration + convex_url = var.convex_url + scale_factor = var.scale_factor + conns = var.conns + requests = var.requests + ramping_duration = var.ramping_duration + consecutive_duration = var.consecutive_duration + ramps_count = var.ramps_count +} diff --git a/examples/convex/pgbenchy/modules/script/entrypoint.sh.tpl b/examples/convex/pgbenchy/modules/script/entrypoint.sh.tpl new file mode 100644 index 0000000..44fa0c2 --- /dev/null +++ b/examples/convex/pgbenchy/modules/script/entrypoint.sh.tpl @@ -0,0 +1,35 @@ +#!/bin/bash + +wget https://golang.org/dl/go1.22.4.linux-amd64.tar.gz +sudo rm -rf /usr/local/go && sudo tar -C 
/usr/local -xzf go1.22.4.linux-amd64.tar.gz +export PATH=$PATH:/usr/local/go/bin + +go install go.k6.io/xk6@latest + +export K6_VERSION='v1.3.0' + +~/go/bin/xk6 build --output /tmp/k6/k6 \ + --with github.com/szkiba/xk6-prometheus@0f8e5dd \ + --with github.com/grafana/xk6-sql@659485a + +telegraf --config telegraf.conf &>/dev/null & + +cd /tmp/k6 || exit 1 + +export RUN_ID="${testrun_id}" +export BENCHMARK_ID="${benchmark_id}" +export TEST_RUN="${testrun_name}" +export TEST_ORIGIN="${test_origin}" +export SUPABENCH_TOKEN="${supabench_token}" +export SUPABENCH_URI="${supabench_uri}" +export CONVEX_URL="${convex_url}" +export INSTANCES="${instances}" + +make ${make_command} \ + SCALE_FACTOR="${scale_factor}" \ + CONNS="${conns}" \ + REQUESTS="${requests}" \ + RAMPING_DURATION="${ramping_duration}" \ + CONSECUTIVE_DURATION="${consecutive_duration}" \ + RAMPS_COUNT="${ramps_count}" \ + TEST_RUN="${testrun_name}" diff --git a/examples/convex/pgbenchy/modules/script/main.tf b/examples/convex/pgbenchy/modules/script/main.tf new file mode 100644 index 0000000..114ac91 --- /dev/null +++ b/examples/convex/pgbenchy/modules/script/main.tf @@ -0,0 +1,164 @@ +# creating ec2 instance that will be used to generate load +# Most likely you will not need to change it + +resource "aws_vpc" "ap-southeast-1" { + enable_dns_support = true + enable_dns_hostnames = true + assign_generated_ipv6_cidr_block = true + cidr_block = "10.0.0.0/16" +} + +resource "aws_subnet" "ap-southeast-1" { + vpc_id = aws_vpc.ap-southeast-1.id + cidr_block = cidrsubnet(aws_vpc.ap-southeast-1.cidr_block, 4, 1) + map_public_ip_on_launch = true + + ipv6_cidr_block = cidrsubnet(aws_vpc.ap-southeast-1.ipv6_cidr_block, 8, 1) + assign_ipv6_address_on_creation = true +} + +resource "aws_internet_gateway" "ap-southeast-1" { + vpc_id = aws_vpc.ap-southeast-1.id +} + +resource "aws_default_route_table" "ap-southeast-1" { + default_route_table_id = aws_vpc.ap-southeast-1.default_route_table_id + + route { + cidr_block = 
"0.0.0.0/0" + gateway_id = aws_internet_gateway.ap-southeast-1.id + } + + route { + ipv6_cidr_block = "::/0" + gateway_id = aws_internet_gateway.ap-southeast-1.id + } +} + +resource "aws_route_table_association" "ap-southeast-1" { + subnet_id = aws_subnet.ap-southeast-1.id + route_table_id = aws_default_route_table.ap-southeast-1.id +} + +resource "aws_security_group" "ap-southeast-1" { + name = "supabench-tf-security-group-${aws_vpc.ap-southeast-1.id}" + vpc_id = aws_vpc.ap-southeast-1.id + ingress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + ingress { + from_port = 0 + to_port = 0 + protocol = "-1" + ipv6_cidr_blocks = ["::/0"] + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + ipv6_cidr_blocks = ["::/0"] + } +} + +resource "aws_instance" "k6" { + count = var.instances_count + + ami = var.ami_id + instance_type = var.instance_type + vpc_security_group_ids = ["${aws_security_group.ap-southeast-1.id}"] + subnet_id = aws_subnet.ap-southeast-1.id + ipv6_address_count = 1 + + key_name = var.key_name + + tags = { + terraform = "true" + environment = "qa" + app = var.sut_name + creator = "supabench" + } +} + +resource "null_resource" "remote" { + count = var.instances_count + + connection { + type = "ssh" + user = var.instance_user + host = aws_instance.k6[count.index].public_ip + private_key = var.private_key_location + timeout = "5m" + } + + provisioner "file" { + source = "${path.root}/k6" + destination = "/tmp" + } + + provisioner "file" { + destination = "/tmp/k6/entrypoint.sh" + + content = templatefile( + "${path.module}/entrypoint.sh.tpl", + { + # TPC-B variables + convex_url = var.convex_url + scale_factor = var.scale_factor + conns = var.conns + requests = var.requests + ramping_duration = var.ramping_duration + consecutive_duration = var.consecutive_duration + ramps_count = var.ramps_count + instances = 
var.instances_count + make_command = "tpcb" + + # don't change these + testrun_id = var.testrun_id + benchmark_id = var.benchmark_id + testrun_name = var.testrun_name + test_origin = var.test_origin + supabench_token = var.supabench_token + supabench_uri = var.supabench_uri + } + ) + } + + provisioner "remote-exec" { + inline = [ + "#!/bin/bash", + "echo \"export RUN_ID='${var.testrun_id}'\" >> ~/.bashrc", + "echo \"export BENCHMARK_ID='${var.benchmark_id}'\" >> ~/.bashrc", + "echo \"export TEST_RUN='${var.testrun_name}'\" >> ~/.bashrc", + "echo \"export TEST_ORIGIN='${var.test_origin}'\" >> ~/.bashrc", + "echo \"export SUPABENCH_TOKEN='${var.supabench_token}'\" >> ~/.bashrc", + "echo \"export SUPABENCH_URI='${var.supabench_uri}'\" >> ~/.bashrc", + "echo \"export CONVEX_URL='${var.convex_url}'\" >> ~/.bashrc", + ] + } + + provisioner "remote-exec" { + inline = [ + "#!/bin/bash", + "source ~/.bashrc", + "sudo chown -R ubuntu:ubuntu /tmp/k6", + "sudo chmod +x /tmp/k6/entrypoint.sh", + "/tmp/k6/entrypoint.sh", + ] + } + + depends_on = [ + aws_instance.k6, + ] +} diff --git a/examples/convex/pgbenchy/modules/script/variables.tf b/examples/convex/pgbenchy/modules/script/variables.tf new file mode 100644 index 0000000..a08a516 --- /dev/null +++ b/examples/convex/pgbenchy/modules/script/variables.tf @@ -0,0 +1,139 @@ +variable "ec2_name" { + description = "Name of ec2 loader instance" + type = string + default = "supaloader" # run ID +} + +variable "instances_count" { + description = "Number of EC2 instances (should be even)" + type = number + default = 8 +} + +variable "instance_type" { + description = "Size of ec2 loader instance" + type = string + default = "t2.micro" # c5.4xlarge +} + +variable "ami_id" { + description = "AMI to use for ec2 loader instance" + type = string +} + +variable "security_group_id" { + description = "Security group to use for ec2 loader instance" + type = string +} + +variable "subnet_id" { + description = "Subnet to use for ec2 loader 
instance" + type = string +} + +variable "instance_user" { + description = "The instance user for sshing" + type = string + default = "admin" +} + +variable "key_name" { + description = "The instance key" + type = string +} + +variable "private_key_location" { + description = "Location of your private key to SSH into the instance" + type = string +} + +variable "sut_name" { + description = "Name of the system under test" + type = string + default = "" +} + +variable "rate" { + description = "Rate of the system under test" + type = string + default = "1" +} + +variable "duration" { + description = "Duration of the test" + type = string + default = "60" +} + +# TPC-B configuration +variable "convex_url" { + description = "Convex deployment URL" + type = string +} + +variable "scale_factor" { + description = "TPC-B scale factor" + type = string + default = "10" +} + +variable "conns" { + description = "Number of connections" + type = string + default = "10" +} + +variable "requests" { + description = "Number of requests" + type = string + default = "10" +} + +variable "ramping_duration" { + description = "Ramping duration in seconds" + type = string + default = "10" +} + +variable "consecutive_duration" { + description = "Consecutive duration in seconds" + type = string + default = "20" +} + +variable "ramps_count" { + description = "Number of ramps" + type = string + default = "1" +} + +variable "testrun_name" { + description = "Name of the testrun" + type = string +} + +variable "testrun_id" { + description = "ID of the testrun" + type = string +} + +variable "test_origin" { + description = "Origin of the test" + type = string + default = "" +} + +variable "benchmark_id" { + description = "ID of the benchmark" + type = string +} + +variable "supabench_token" { + description = "Token to access the reports" + type = string +} + +variable "supabench_uri" { + description = "URI of the supabench server" + type = string +} diff --git 
a/examples/convex/pgbenchy/modules/setup/main.tf b/examples/convex/pgbenchy/modules/setup/main.tf new file mode 100644 index 0000000..4cbb840 --- /dev/null +++ b/examples/convex/pgbenchy/modules/setup/main.tf @@ -0,0 +1,34 @@ +resource "null_resource" "fly" { + triggers = { + app_name = var.app_name + fly_access_token = var.fly_access_token + } + + provisioner "local-exec" { + command = "/flyctl scale -a ${var.app_name} count ${var.app_nodes_count}" + environment = { + HOME = path.module + FLY_ACCESS_TOKEN = var.fly_access_token + } + } + + provisioner "local-exec" { + when = destroy + command = "/flyctl scale -a ${self.triggers.app_name} count 0" + environment = { + HOME = path.module + FLY_ACCESS_TOKEN = self.triggers.fly_access_token + } + } +} + +output "ready" { + # the value is not important because we're just + # using this for its dependencies. + value = {} + + # Anything that refers to this output must wait until + # the actions for null_resource.fly + # to have completed first. 
+ depends_on = [null_resource.fly] +} \ No newline at end of file diff --git a/examples/convex/pgbenchy/modules/setup/variables.tf b/examples/convex/pgbenchy/modules/setup/variables.tf new file mode 100644 index 0000000..06a626f --- /dev/null +++ b/examples/convex/pgbenchy/modules/setup/variables.tf @@ -0,0 +1,15 @@ +variable "app_name" { + description = "Name of fly app" + type = string +} + +variable "fly_access_token" { + description = "Fly access token" + type = string +} + +variable "app_nodes_count" { + description = "Count of fly app nodes" + type = string + default = 6 +} \ No newline at end of file diff --git a/examples/convex/pgbenchy/project/.gitignore b/examples/convex/pgbenchy/project/.gitignore new file mode 100644 index 0000000..ead73e5 --- /dev/null +++ b/examples/convex/pgbenchy/project/.gitignore @@ -0,0 +1,6 @@ +.env.local +summary.json +results_*.json +package-lock.json +node_modules/ +.DS_Store \ No newline at end of file diff --git a/examples/convex/pgbenchy/project/convex/_generated/api.d.ts b/examples/convex/pgbenchy/project/convex/_generated/api.d.ts new file mode 100644 index 0000000..6bbd26d --- /dev/null +++ b/examples/convex/pgbenchy/project/convex/_generated/api.d.ts @@ -0,0 +1,49 @@ +/* eslint-disable */ +/** + * Generated `api` utility. + * + * THIS CODE IS AUTOMATICALLY GENERATED. + * + * To regenerate, run `npx convex dev`. + * @module + */ + +import type * as benchmark from "../benchmark.js"; + +import type { + ApiFromModules, + FilterApi, + FunctionReference, +} from "convex/server"; + +declare const fullApi: ApiFromModules<{ + benchmark: typeof benchmark; +}>; + +/** + * A utility for referencing Convex functions in your app's public API. + * + * Usage: + * ```js + * const myFunctionReference = api.myModule.myFunction; + * ``` + */ +export declare const api: FilterApi< + typeof fullApi, + FunctionReference +>; + +/** + * A utility for referencing Convex functions in your app's internal API. 
+ * + * Usage: + * ```js + * const myFunctionReference = internal.myModule.myFunction; + * ``` + */ +export declare const internal: FilterApi< + typeof fullApi, + FunctionReference +>; + +export declare const components: {}; diff --git a/examples/convex/pgbenchy/project/convex/_generated/api.js b/examples/convex/pgbenchy/project/convex/_generated/api.js new file mode 100644 index 0000000..44bf985 --- /dev/null +++ b/examples/convex/pgbenchy/project/convex/_generated/api.js @@ -0,0 +1,23 @@ +/* eslint-disable */ +/** + * Generated `api` utility. + * + * THIS CODE IS AUTOMATICALLY GENERATED. + * + * To regenerate, run `npx convex dev`. + * @module + */ + +import { anyApi, componentsGeneric } from "convex/server"; + +/** + * A utility for referencing Convex functions in your app's API. + * + * Usage: + * ```js + * const myFunctionReference = api.myModule.myFunction; + * ``` + */ +export const api = anyApi; +export const internal = anyApi; +export const components = componentsGeneric(); diff --git a/examples/convex/pgbenchy/project/convex/_generated/dataModel.d.ts b/examples/convex/pgbenchy/project/convex/_generated/dataModel.d.ts new file mode 100644 index 0000000..f97fd19 --- /dev/null +++ b/examples/convex/pgbenchy/project/convex/_generated/dataModel.d.ts @@ -0,0 +1,60 @@ +/* eslint-disable */ +/** + * Generated data model types. + * + * THIS CODE IS AUTOMATICALLY GENERATED. + * + * To regenerate, run `npx convex dev`. + * @module + */ + +import type { + DataModelFromSchemaDefinition, + DocumentByName, + TableNamesInDataModel, + SystemTableNames, +} from "convex/server"; +import type { GenericId } from "convex/values"; +import schema from "../schema.js"; + +/** + * The names of all of your Convex tables. + */ +export type TableNames = TableNamesInDataModel; + +/** + * The type of a document stored in Convex. + * + * @typeParam TableName - A string literal type of the table name (like "users"). 
+ */ +export type Doc = DocumentByName< + DataModel, + TableName +>; + +/** + * An identifier for a document in Convex. + * + * Convex documents are uniquely identified by their `Id`, which is accessible + * on the `_id` field. To learn more, see [Document IDs](https://docs.convex.dev/using/document-ids). + * + * Documents can be loaded using `db.get(tableName, id)` in query and mutation functions. + * + * IDs are just strings at runtime, but this type can be used to distinguish them from other + * strings when type checking. + * + * @typeParam TableName - A string literal type of the table name (like "users"). + */ +export type Id = + GenericId; + +/** + * A type describing your Convex data model. + * + * This type includes information about what tables you have, the type of + * documents stored in those tables, and the indexes defined on them. + * + * This type is used to parameterize methods like `queryGeneric` and + * `mutationGeneric` to make them type-safe. + */ +export type DataModel = DataModelFromSchemaDefinition; diff --git a/examples/convex/pgbenchy/project/convex/_generated/server.d.ts b/examples/convex/pgbenchy/project/convex/_generated/server.d.ts new file mode 100644 index 0000000..bec05e6 --- /dev/null +++ b/examples/convex/pgbenchy/project/convex/_generated/server.d.ts @@ -0,0 +1,143 @@ +/* eslint-disable */ +/** + * Generated utilities for implementing server-side Convex query and mutation functions. + * + * THIS CODE IS AUTOMATICALLY GENERATED. + * + * To regenerate, run `npx convex dev`. + * @module + */ + +import { + ActionBuilder, + HttpActionBuilder, + MutationBuilder, + QueryBuilder, + GenericActionCtx, + GenericMutationCtx, + GenericQueryCtx, + GenericDatabaseReader, + GenericDatabaseWriter, +} from "convex/server"; +import type { DataModel } from "./dataModel.js"; + +/** + * Define a query in this Convex app's public API. + * + * This function will be allowed to read your Convex database and will be accessible from the client. 
+ * + * @param func - The query function. It receives a {@link QueryCtx} as its first argument. + * @returns The wrapped query. Include this as an `export` to name it and make it accessible. + */ +export declare const query: QueryBuilder; + +/** + * Define a query that is only accessible from other Convex functions (but not from the client). + * + * This function will be allowed to read from your Convex database. It will not be accessible from the client. + * + * @param func - The query function. It receives a {@link QueryCtx} as its first argument. + * @returns The wrapped query. Include this as an `export` to name it and make it accessible. + */ +export declare const internalQuery: QueryBuilder; + +/** + * Define a mutation in this Convex app's public API. + * + * This function will be allowed to modify your Convex database and will be accessible from the client. + * + * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. + * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. + */ +export declare const mutation: MutationBuilder; + +/** + * Define a mutation that is only accessible from other Convex functions (but not from the client). + * + * This function will be allowed to modify your Convex database. It will not be accessible from the client. + * + * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. + * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. + */ +export declare const internalMutation: MutationBuilder; + +/** + * Define an action in this Convex app's public API. + * + * An action is a function which can execute any JavaScript code, including non-deterministic + * code and code with side-effects, like calling third-party services. + * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. 
+ * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. + * + * @param func - The action. It receives an {@link ActionCtx} as its first argument. + * @returns The wrapped action. Include this as an `export` to name it and make it accessible. + */ +export declare const action: ActionBuilder; + +/** + * Define an action that is only accessible from other Convex functions (but not from the client). + * + * @param func - The function. It receives an {@link ActionCtx} as its first argument. + * @returns The wrapped function. Include this as an `export` to name it and make it accessible. + */ +export declare const internalAction: ActionBuilder; + +/** + * Define an HTTP action. + * + * The wrapped function will be used to respond to HTTP requests received + * by a Convex deployment if the requests matches the path and method where + * this action is routed. Be sure to route your httpAction in `convex/http.js`. + * + * @param func - The function. It receives an {@link ActionCtx} as its first argument + * and a Fetch API `Request` object as its second. + * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up. + */ +export declare const httpAction: HttpActionBuilder; + +/** + * A set of services for use within Convex query functions. + * + * The query context is passed as the first argument to any Convex query + * function run on the server. + * + * This differs from the {@link MutationCtx} because all of the services are + * read-only. + */ +export type QueryCtx = GenericQueryCtx; + +/** + * A set of services for use within Convex mutation functions. + * + * The mutation context is passed as the first argument to any Convex mutation + * function run on the server. + */ +export type MutationCtx = GenericMutationCtx; + +/** + * A set of services for use within Convex action functions. 
+ * + * The action context is passed as the first argument to any Convex action + * function run on the server. + */ +export type ActionCtx = GenericActionCtx; + +/** + * An interface to read from the database within Convex query functions. + * + * The two entry points are {@link DatabaseReader.get}, which fetches a single + * document by its {@link Id}, or {@link DatabaseReader.query}, which starts + * building a query. + */ +export type DatabaseReader = GenericDatabaseReader; + +/** + * An interface to read from and write to the database within Convex mutation + * functions. + * + * Convex guarantees that all writes within a single mutation are + * executed atomically, so you never have to worry about partial writes leaving + * your data in an inconsistent state. See [the Convex Guide](https://docs.convex.dev/understanding/convex-fundamentals/functions#atomicity-and-optimistic-concurrency-control) + * for the guarantees Convex provides your functions. + */ +export type DatabaseWriter = GenericDatabaseWriter; diff --git a/examples/convex/pgbenchy/project/convex/_generated/server.js b/examples/convex/pgbenchy/project/convex/_generated/server.js new file mode 100644 index 0000000..bf3d25a --- /dev/null +++ b/examples/convex/pgbenchy/project/convex/_generated/server.js @@ -0,0 +1,93 @@ +/* eslint-disable */ +/** + * Generated utilities for implementing server-side Convex query and mutation functions. + * + * THIS CODE IS AUTOMATICALLY GENERATED. + * + * To regenerate, run `npx convex dev`. + * @module + */ + +import { + actionGeneric, + httpActionGeneric, + queryGeneric, + mutationGeneric, + internalActionGeneric, + internalMutationGeneric, + internalQueryGeneric, +} from "convex/server"; + +/** + * Define a query in this Convex app's public API. + * + * This function will be allowed to read your Convex database and will be accessible from the client. + * + * @param func - The query function. It receives a {@link QueryCtx} as its first argument. 
+ * @returns The wrapped query. Include this as an `export` to name it and make it accessible. + */ +export const query = queryGeneric; + +/** + * Define a query that is only accessible from other Convex functions (but not from the client). + * + * This function will be allowed to read from your Convex database. It will not be accessible from the client. + * + * @param func - The query function. It receives a {@link QueryCtx} as its first argument. + * @returns The wrapped query. Include this as an `export` to name it and make it accessible. + */ +export const internalQuery = internalQueryGeneric; + +/** + * Define a mutation in this Convex app's public API. + * + * This function will be allowed to modify your Convex database and will be accessible from the client. + * + * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. + * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. + */ +export const mutation = mutationGeneric; + +/** + * Define a mutation that is only accessible from other Convex functions (but not from the client). + * + * This function will be allowed to modify your Convex database. It will not be accessible from the client. + * + * @param func - The mutation function. It receives a {@link MutationCtx} as its first argument. + * @returns The wrapped mutation. Include this as an `export` to name it and make it accessible. + */ +export const internalMutation = internalMutationGeneric; + +/** + * Define an action in this Convex app's public API. + * + * An action is a function which can execute any JavaScript code, including non-deterministic + * code and code with side-effects, like calling third-party services. + * They can be run in Convex's JavaScript environment or in Node.js using the "use node" directive. + * They can interact with the database indirectly by calling queries and mutations using the {@link ActionCtx}. + * + * @param func - The action. 
It receives an {@link ActionCtx} as its first argument. + * @returns The wrapped action. Include this as an `export` to name it and make it accessible. + */ +export const action = actionGeneric; + +/** + * Define an action that is only accessible from other Convex functions (but not from the client). + * + * @param func - The function. It receives an {@link ActionCtx} as its first argument. + * @returns The wrapped function. Include this as an `export` to name it and make it accessible. + */ +export const internalAction = internalActionGeneric; + +/** + * Define an HTTP action. + * + * The wrapped function will be used to respond to HTTP requests received + * by a Convex deployment if the requests matches the path and method where + * this action is routed. Be sure to route your httpAction in `convex/http.js`. + * + * @param func - The function. It receives an {@link ActionCtx} as its first argument + * and a Fetch API `Request` object as its second. + * @returns The wrapped function. Import this function from `convex/http.js` and route it to hook it up. 
+ */ +export const httpAction = httpActionGeneric; diff --git a/examples/convex/pgbenchy/project/convex/benchmark.ts b/examples/convex/pgbenchy/project/convex/benchmark.ts new file mode 100644 index 0000000..c30917b --- /dev/null +++ b/examples/convex/pgbenchy/project/convex/benchmark.ts @@ -0,0 +1,424 @@ +import { mutation, query } from "./_generated/server"; +import { v } from "convex/values"; + +// ============================================================================= +// SEEDING FUNCTIONS +// ============================================================================= + +/** + * Seed branches table + * Creates `scale` branches with bid 1..scale + */ +export const seedBranches = mutation({ + args: { scale: v.number() }, + handler: async (ctx, { scale }) => { + // Check if already seeded + const existing = await ctx.db + .query("branches") + .withIndex("by_bid", (q) => q.eq("bid", 1)) + .unique(); + + if (existing) { + console.log("Branches already seeded, skipping..."); + return { seeded: 0, skipped: true }; + } + + for (let bid = 1; bid <= scale; bid++) { + await ctx.db.insert("branches", { + bid, + bbalance: 0, + filler: "".padEnd(88, " "), // char(88) filler + }); + } + + return { seeded: scale, skipped: false }; + }, +}); + +/** + * Seed tellers table + * Creates 10 tellers per branch (10 * scale total) + */ +export const seedTellers = mutation({ + args: { scale: v.number() }, + handler: async (ctx, { scale }) => { + // Check if already seeded + const existing = await ctx.db + .query("tellers") + .withIndex("by_tid", (q) => q.eq("tid", 1)) + .unique(); + + if (existing) { + console.log("Tellers already seeded, skipping..."); + return { seeded: 0, skipped: true }; + } + + let count = 0; + for (let bid = 1; bid <= scale; bid++) { + // Get branch document reference + const branch = await ctx.db + .query("branches") + .withIndex("by_bid", (q) => q.eq("bid", bid)) + .unique(); + + if (!branch) { + throw new Error(`Branch ${bid} not found. 
Run seedBranches first.`); + } + + // 10 tellers per branch + for (let t = 1; t <= 10; t++) { + const tid = (bid - 1) * 10 + t; + await ctx.db.insert("tellers", { + tid, + bid, + branchId: branch._id, + tbalance: 0, + filler: "".padEnd(84, " "), // char(84) filler + }); + count++; + } + } + + return { seeded: count, skipped: false }; + }, +}); + +/** + * Seed accounts in batches + * Convex mutations have a 1-second timeout, so we batch account creation + * + * Call multiple times with different startAid values: + * seedAccountBatch({ startAid: 1, count: 5000, scale: 10 }) + * seedAccountBatch({ startAid: 5001, count: 5000, scale: 10 }) + * etc. + */ +export const seedAccountBatch = mutation({ + args: { + startAid: v.number(), + count: v.number(), + scale: v.number(), + }, + handler: async (ctx, { startAid, count, scale }) => { + // Check if this batch already exists + const existing = await ctx.db + .query("accounts") + .withIndex("by_aid", (q) => q.eq("aid", startAid)) + .unique(); + + if (existing) { + console.log(`Account ${startAid} already exists, skipping batch...`); + return { seeded: 0, skipped: true }; + } + + // Pre-fetch all branches for efficiency + const branches = await ctx.db.query("branches").collect(); + const branchMap = new Map(branches.map((b) => [b.bid, b._id])); + + let seeded = 0; + for (let i = 0; i < count; i++) { + const aid = startAid + i; + + // Distribute accounts across branches (round-robin) + const bid = ((aid - 1) % scale) + 1; + const branchId = branchMap.get(bid); + + if (!branchId) { + throw new Error(`Branch ${bid} not found. Run seedBranches first.`); + } + + await ctx.db.insert("accounts", { + aid, + bid, + branchId, + abalance: 0, + filler: "".padEnd(84, " "), // char(84) filler + }); + seeded++; + } + + return { seeded, skipped: false }; + }, +}); + +/** + * Seed history with random historical data + * Similar to: INSERT INTO history SELECT random()... 
FROM accounts LIMIT count + */ +export const seedHistory = mutation({ + args: { count: v.number() }, + handler: async (ctx, { count }) => { + // Get some accounts to reference + const accounts = await ctx.db.query("accounts").take(Math.min(count, 1000)); + + if (accounts.length === 0) { + throw new Error("No accounts found. Run seedAccountBatch first."); + } + + let seeded = 0; + for (let i = 0; i < count; i++) { + const account = accounts[i % accounts.length]; + const tid = Math.floor(Math.random() * 100) + 1; + const delta = Math.floor(Math.random() * 1000) - 500; + + // Insert history record and use its document ID as hid + const historyId = await ctx.db.insert("history", { + // hid: "", // Temporary placeholder + tid, + bid: account.bid, + aid: account.aid, + delta, + mtime: Date.now(), + filler: "".padEnd(22, " "), // char(22) filler + accountId: account._id, + branchId: account.branchId, + }); + + // Update hid to use the document's own ID + // await ctx.db.patch(historyId, { hid: historyId }); + seeded++; + } + + return { seeded }; + }, +}); + +// ============================================================================= +// TPC-B TRANSACTION +// ============================================================================= + +/** + * The main TPC-B transaction + * + * This is the equivalent of the pgbench transaction: + * BEGIN; + * UPDATE accounts SET abalance = abalance + :delta WHERE aid = :aid; + * SELECT abalance FROM accounts WHERE aid = :aid; + * UPDATE tellers SET tbalance = tbalance + :delta WHERE tid = :tid; + * UPDATE branches SET bbalance = bbalance + :delta WHERE bid = :bid; + * INSERT INTO history (tid, bid, aid, delta, mtime) VALUES (:tid, :bid, :aid, :delta, CURRENT_TIMESTAMP); + * COMMIT; + * + * In Convex, the entire mutation is atomic - no explicit BEGIN/COMMIT needed. + * Convex uses OCC (Optimistic Concurrency Control) and will retry on conflicts. 
+ */ +export const tpcbTransaction = mutation({ + args: { + aid: v.number(), // Account ID: 1 to 100000 * scale + tid: v.number(), // Teller ID: 1 to 10 * scale + bid: v.number(), // Branch ID: 1 to scale + delta: v.number(), // Transaction amount: -500 to 500 (or -5000 to 5000) + }, + handler: async (ctx, { aid, tid, bid, delta }) => { + // 1. Update account balance + const account = await ctx.db + .query("accounts") + .withIndex("by_aid", (q) => q.eq("aid", aid)) + .unique(); + + if (!account) { + throw new Error(`Account ${aid} not found`); + } + + const newAbalance = account.abalance + delta; + await ctx.db.patch(account._id, { abalance: newAbalance }); + + // 2. SELECT abalance (we already have it from step 1) + // In the original pgbench, this is a separate SELECT statement + // Here we just use the value we computed + + // 3. Update teller balance + const teller = await ctx.db + .query("tellers") + .withIndex("by_tid", (q) => q.eq("tid", tid)) + .unique(); + + if (!teller) { + throw new Error(`Teller ${tid} not found`); + } + + await ctx.db.patch(teller._id, { tbalance: teller.tbalance + delta }); + + // 4. Update branch balance + const branch = await ctx.db + .query("branches") + .withIndex("by_bid", (q) => q.eq("bid", bid)) + .unique(); + + if (!branch) { + throw new Error(`Branch ${bid} not found`); + } + + await ctx.db.patch(branch._id, { bbalance: branch.bbalance + delta }); + + // 5. 
Insert history record + // Use document ID as hid to avoid counter contention + const historyId = await ctx.db.insert("history", { + tid, + bid, + aid, + delta, + mtime: Date.now(), + filler: "".padEnd(22, " "), + accountId: account._id, + tellerId: teller._id, + branchId: branch._id, + }); + + // Update hid to use the document's own ID + // await ctx.db.patch(historyId, { hid: historyId }); + + // Return account balance (per TPC-B spec) + return { abalance: newAbalance }; + }, +}); + +// ============================================================================= +// CLEANUP FUNCTIONS +// ============================================================================= + +/** + * Clear all benchmark data + * Use in teardown to reset the database + */ +export const clearAllData = mutation({ + args: {}, + handler: async (ctx) => { + // Delete in order to respect "foreign key" logic (history first, then accounts, etc.) + + // Clear history + const history = await ctx.db.query("history").collect(); + for (const h of history) { + await ctx.db.delete(h._id); + } + + // Clear accounts + const accounts = await ctx.db.query("accounts").collect(); + for (const a of accounts) { + await ctx.db.delete(a._id); + } + + // Clear tellers + const tellers = await ctx.db.query("tellers").collect(); + for (const t of tellers) { + await ctx.db.delete(t._id); + } + + // Clear branches + const branches = await ctx.db.query("branches").collect(); + for (const b of branches) { + await ctx.db.delete(b._id); + } + + // Clear counters + const counters = await ctx.db.query("counters").collect(); + for (const c of counters) { + await ctx.db.delete(c._id); + } + + return { + deleted: { + history: history.length, + accounts: accounts.length, + tellers: tellers.length, + branches: branches.length, + counters: counters.length, + }, + }; + }, +}); + +/** + * Clear data in batches (for large datasets) + * Convex has limits on how many documents can be deleted in one mutation + */ +export const 
clearTableBatch = mutation({ + args: { + table: v.union( + v.literal("history"), + v.literal("accounts"), + v.literal("tellers"), + v.literal("branches"), + v.literal("counters") + ), + limit: v.number(), + }, + handler: async (ctx, { table, limit }) => { + let deleted = 0; + + if (table === "history") { + const docs = await ctx.db.query("history").take(limit); + for (const doc of docs) { + await ctx.db.delete(doc._id); + deleted++; + } + } else if (table === "accounts") { + const docs = await ctx.db.query("accounts").take(limit); + for (const doc of docs) { + await ctx.db.delete(doc._id); + deleted++; + } + } else if (table === "tellers") { + const docs = await ctx.db.query("tellers").take(limit); + for (const doc of docs) { + await ctx.db.delete(doc._id); + deleted++; + } + } else if (table === "branches") { + const docs = await ctx.db.query("branches").take(limit); + for (const doc of docs) { + await ctx.db.delete(doc._id); + deleted++; + } + } else if (table === "counters") { + const docs = await ctx.db.query("counters").take(limit); + for (const doc of docs) { + await ctx.db.delete(doc._id); + deleted++; + } + } + + return { deleted, table }; + }, +}); + +// ============================================================================= +// UTILITY QUERIES +// ============================================================================= + +/** + * Get current table counts (useful for verification) + */ +export const getTableCounts = query({ + args: {}, + handler: async (ctx) => { + const branches = await ctx.db.query("branches").collect(); + const tellers = await ctx.db.query("tellers").collect(); + const accounts = await ctx.db.query("accounts").collect(); + const history = await ctx.db.query("history").collect(); + + return { + branches: branches.length, + tellers: tellers.length, + accounts: accounts.length, + history: history.length, + }; + }, +}); + +/** + * Get total balances (for verification) + */ +export const getTotalBalances = query({ + args: {}, + 
handler: async (ctx) => { + const branches = await ctx.db.query("branches").collect(); + const tellers = await ctx.db.query("tellers").collect(); + const accounts = await ctx.db.query("accounts").collect(); + + return { + branchTotal: branches.reduce((sum, b) => sum + b.bbalance, 0), + tellerTotal: tellers.reduce((sum, t) => sum + t.tbalance, 0), + accountTotal: accounts.reduce((sum, a) => sum + a.abalance, 0), + }; + }, +}); \ No newline at end of file diff --git a/examples/convex/pgbenchy/project/convex/schema.ts b/examples/convex/pgbenchy/project/convex/schema.ts new file mode 100644 index 0000000..51f0de4 --- /dev/null +++ b/examples/convex/pgbenchy/project/convex/schema.ts @@ -0,0 +1,54 @@ +import { defineSchema, defineTable } from "convex/server"; +import { v } from "convex/values"; + +export default defineSchema({ + branches: defineTable({ + bid: v.number(), + bbalance: v.number(), + filler: v.optional(v.string()), + }).index("by_bid", ["bid"]), + + tellers: defineTable({ + tid: v.number(), + bid: v.number(), // Store bid directly for easier lookups + branchId: v.id("branches"), + tbalance: v.number(), + filler: v.optional(v.string()), + }) + .index("by_tid", ["tid"]) + .index("by_bid", ["bid"]), + + accounts: defineTable({ + aid: v.number(), + bid: v.number(), // Store bid directly for easier lookups + branchId: v.id("branches"), + abalance: v.number(), + filler: v.optional(v.string()), + }) + .index("by_aid", ["aid"]) + .index("by_bid", ["bid"]), + + history: defineTable({ + // hid: v.string(), // Changed from v.number() to v.string() to use Convex IDs and eliminate counter contention + tid: v.number(), + bid: v.number(), + aid: v.number(), + delta: v.number(), + mtime: v.number(), + filler: v.optional(v.string()), + // Document references (optional, for joins if needed) + accountId: v.optional(v.id("accounts")), + tellerId: v.optional(v.id("tellers")), + branchId: v.optional(v.id("branches")), + }) + // .index("by_hid", ["hid"]) + .index("by_tid", 
["tid"]) + .index("by_bid", ["bid"]) + .index("by_aid", ["aid"]), + + // Counter table for generating sequential IDs + counters: defineTable({ + name: v.string(), + value: v.number(), + }).index("by_name", ["name"]), +}); \ No newline at end of file diff --git a/examples/convex/pgbenchy/project/package.json b/examples/convex/pgbenchy/project/package.json new file mode 100644 index 0000000..4db5b08 --- /dev/null +++ b/examples/convex/pgbenchy/project/package.json @@ -0,0 +1,29 @@ +{ + "name": "convex-tpcb", + "version": "1.0.0", + "description": "TPC-B benchmark for Convex using k6", + "private": true, + "scripts": { + "dev": "convex dev", + "deploy": "convex deploy", + "seed": "node scripts/seed.js", + "seed:small": "node scripts/seed.js 1", + "seed:medium": "node scripts/seed.js 10", + "seed:large": "node scripts/seed.js 100", + "clear": "node scripts/clear.js", + "test": "k6 run k6/tpcb-convex.js", + "test:quick": "k6 run k6/tpcb-convex.js -e SCALE_FACTOR=1 -e CONNS=5 -e CONSECUTIVE_DURATION=30", + "test:medium": "k6 run k6/tpcb-convex.js -e SCALE_FACTOR=10 -e CONNS=20 -e CONSECUTIVE_DURATION=300", + "test:full": "k6 run k6/tpcb-convex.js -e SCALE_FACTOR=10 -e CONNS=50 -e CONSECUTIVE_DURATION=600", + "bench": "bash scripts/run.sh", + "bench:quick": "bash scripts/run.sh 1 60 5", + "bench:medium": "bash scripts/run.sh 10 300 20", + "bench:full": "bash scripts/run.sh 10 600 50" + }, + "dependencies": { + "convex": "^1.17.0" + }, + "devDependencies": { + "typescript": "^5.0.0" + } +} \ No newline at end of file diff --git a/examples/convex/pgbenchy/project/scripts/clear.js b/examples/convex/pgbenchy/project/scripts/clear.js new file mode 100755 index 0000000..6e36fce --- /dev/null +++ b/examples/convex/pgbenchy/project/scripts/clear.js @@ -0,0 +1,49 @@ +#!/usr/bin/env node +// clear.js - Batched cleanup script for TPC-B benchmark + +/** + * Standalone cleanup script that can be run independently of k6 + * Usage: node scripts/clear.js + */ + +const CONVEX_URL = 
process.env.CONVEX_URL || 'https://your-deployment.convex.cloud' + +async function convexMutation(path, args) { + const response = await fetch(`${CONVEX_URL}/api/mutation`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ path, args, format: 'json' }), + }) + return response.json() +} + +async function clear() { + console.log('Clearing TPC-B benchmark data...') + console.log(`Convex URL: ${CONVEX_URL}\n`) + + const batchSize = 8000 + const tables = ['history', 'accounts', 'tellers', 'branches', 'counters'] + + for (const table of tables) { + console.log(`Clearing ${table}...`) + let totalCleared = 0 + let result + + do { + result = await convexMutation('benchmark:clearTableBatch', { + table, + limit: batchSize, + }) + totalCleared += result.deleted || 0 + if (result.deleted > 0) { + process.stdout.write(` Cleared: ${totalCleared}\r`) + } + } while (result.deleted === batchSize) + + console.log(` Cleared: ${totalCleared} records`) + } + + console.log('\nCleanup complete!') +} + +clear().catch(console.error) diff --git a/examples/convex/pgbenchy/project/scripts/run.sh b/examples/convex/pgbenchy/project/scripts/run.sh new file mode 100755 index 0000000..0a7a658 --- /dev/null +++ b/examples/convex/pgbenchy/project/scripts/run.sh @@ -0,0 +1,55 @@ +#!/bin/bash +# run.sh - Full benchmark runner for TPC-B + +set -e + +# Default configuration +SCALE_FACTOR=${SCALE_FACTOR:-10} +CONNS=${CONNS:-10} +REQUESTS=${REQUESTS:-10} +RAMPING_DURATION=${RAMPING_DURATION:-10} +CONSECUTIVE_DURATION=${CONSECUTIVE_DURATION:-20} +RAMPS_COUNT=${RAMPS_COUNT:-1} +TEST_RUN=${TEST_RUN:-$(date +%Y%m%d_%H%M%S)} + +# Check if CONVEX_URL is set +if [ -z "$CONVEX_URL" ]; then + echo "Error: CONVEX_URL environment variable is not set" + echo "Please set it to your Convex deployment URL" + exit 1 +fi + +echo "=========================================" +echo "TPC-B Benchmark Runner" +echo "=========================================" +echo "Configuration:" 
+echo " CONVEX_URL: $CONVEX_URL" +echo " SCALE_FACTOR: $SCALE_FACTOR" +echo " CONNS: $CONNS" +echo " REQUESTS: $REQUESTS" +echo " RAMPING_DURATION: ${RAMPING_DURATION}s" +echo " CONSECUTIVE_DURATION: ${CONSECUTIVE_DURATION}s" +echo " RAMPS_COUNT: $RAMPS_COUNT" +echo " TEST_RUN: $TEST_RUN" +echo "=========================================" +echo "" + +# Run k6 test +k6 run \ + --out json=results_${TEST_RUN}.json \ + -e CONVEX_URL="$CONVEX_URL" \ + -e SCALE_FACTOR="$SCALE_FACTOR" \ + -e CONNS="$CONNS" \ + -e REQUESTS="$REQUESTS" \ + -e RAMPING_DURATION="$RAMPING_DURATION" \ + -e CONSECUTIVE_DURATION="$CONSECUTIVE_DURATION" \ + -e RAMPS_COUNT="$RAMPS_COUNT" \ + -e TEST_RUN="$TEST_RUN" \ + k6/tpcb-convex.js + +echo "" +echo "=========================================" +echo "Benchmark complete!" +echo "Results saved to: results_${TEST_RUN}.json" +echo "Summary saved to: summary.json" +echo "=========================================" diff --git a/examples/convex/pgbenchy/project/scripts/seed.js b/examples/convex/pgbenchy/project/scripts/seed.js new file mode 100755 index 0000000..2e6e12e --- /dev/null +++ b/examples/convex/pgbenchy/project/scripts/seed.js @@ -0,0 +1,57 @@ +#!/usr/bin/env node +// seed.js - Batched seeding script for TPC-B benchmark + +/** + * Standalone seeding script that can be run independently of k6 + * Usage: node scripts/seed.js [scale_factor] + */ + +const CONVEX_URL = process.env.CONVEX_URL || 'https://your-deployment.convex.cloud' +const SCALE = parseInt(process.argv[2] || process.env.SCALE_FACTOR || '10', 10) + +async function convexMutation(path, args) { + const response = await fetch(`${CONVEX_URL}/api/mutation`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ path, args, format: 'json' }), + }) + return response.json() +} + +async function seed() { + console.log(`Seeding TPC-B benchmark with scale factor ${SCALE}`) + console.log(`Convex URL: ${CONVEX_URL}\n`) + + // Seed branches + 
console.log(`Seeding ${SCALE} branches...`) + let result = await convexMutation('benchmark:seedBranches', { scale: SCALE }) + console.log(` Branches: ${result.seeded} seeded\n`) + + // Seed tellers + console.log(`Seeding ${SCALE * 10} tellers...`) + result = await convexMutation('benchmark:seedTellers', { scale: SCALE }) + console.log(` Tellers: ${result.seeded} seeded\n`) + + // Seed accounts in batches + const totalAccounts = 100000 * SCALE + const batchSize = 5000 + console.log(`Seeding ${totalAccounts} accounts in batches of ${batchSize}...`) + + for (let startAid = 1; startAid <= totalAccounts; startAid += batchSize) { + const count = Math.min(batchSize, totalAccounts - startAid + 1) + await convexMutation('benchmark:seedAccountBatch', { + startAid, + count, + scale: SCALE, + }) + + const progress = Math.min(100, Math.round((startAid / totalAccounts) * 100)) + if (startAid % (batchSize * 10) === 1 || startAid + batchSize > totalAccounts) { + console.log(` Progress: ${progress}% (${startAid + count - 1}/${totalAccounts})`) + } + } + + console.log('\nSeeding complete!') +} + +seed().catch(console.error) diff --git a/examples/convex/pgbenchy/variables.tf b/examples/convex/pgbenchy/variables.tf new file mode 100644 index 0000000..2b155f2 --- /dev/null +++ b/examples/convex/pgbenchy/variables.tf @@ -0,0 +1,158 @@ +variable "testrun_name" { + description = "Name of the testrun" + type = string +} + +variable "testrun_id" { + description = "ID of the testrun" + type = string +} + +variable "test_origin" { + description = "Origin of the test" + type = string + default = "" +} + +variable "benchmark_id" { + description = "ID of the benchmark" + type = string +} + +variable "supabench_token" { + description = "Token to access the supabench" + type = string + sensitive = true +} + +variable "supabench_uri" { + description = "URI of the supabench server" + type = string +} + +variable "instances_count" { + description = "Number of EC2 instances (should be even)" + type = 
number + default = 8 +} + +variable "ec2_name" { + description = "Name of ec2 loader instance" + type = string + default = "supaloader" # run ID +} + +variable "instance_type" { + description = "Size of ec2 loader instance" + type = string + default = "t2.micro" # c5.4xlarge +} + +variable "ami_id" { + description = "AMI to use for ec2 loader instance" + type = string +} + +variable "security_group_id" { + description = "Security group to use for ec2 loader instance" + type = string +} + +variable "subnet_id" { + description = "Subnet to use for ec2 loader instance" + type = string +} + +variable "instance_user" { + description = "The instance user for sshing" + type = string + default = "admin" +} + +variable "key_name" { + description = "The instance key" + type = string +} + +variable "private_key_location" { + description = "Location of your private key to SSH into the instance" + type = string +} + +variable "sut_name" { + description = "Name of the system under test" + type = string + default = "" +} + +variable "rate" { + description = "Rate of the system under test" + type = string + default = "1" +} + +variable "duration" { + description = "Duration of the test" + type = string + default = "60" +} + +# TPC-B configuration +variable "convex_url" { + description = "Convex deployment URL" + type = string +} + +variable "scale_factor" { + description = "TPC-B scale factor" + type = string + default = "10" +} + +variable "conns" { + description = "Number of connections" + type = string + default = "10" +} + +variable "requests" { + description = "Number of requests" + type = string + default = "10" +} + +variable "ramping_duration" { + description = "Ramping duration in seconds" + type = string + default = "10" +} + +variable "consecutive_duration" { + description = "Consecutive duration in seconds" + type = string + default = "20" +} + +variable "ramps_count" { + description = "Number of ramps" + type = string + default = "1" +} + + +variable "app_name" { + 
description = "Name of fly app" + type = string + default = "realtime-qa" # fly app name +} + +variable "fly_access_token" { + description = "Fly access token" + type = string +} + +variable "app_nodes_count" { + description = "Count of fly app nodes" + type = string + default = 6 +}