diff --git a/github-metrics/package-lock.json b/github-metrics/package-lock.json index f52d4b0..fc1bfbe 100644 --- a/github-metrics/package-lock.json +++ b/github-metrics/package-lock.json @@ -134,6 +134,7 @@ "resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.5.tgz", "integrity": "sha512-t54CUOsFMappY1Jbzb7fetWeO0n6K0k/4+/ZpkS+3Joz8I4VcvY9OiEBFRYISqaI2fq5sCiPtAjRDOzVYG8m+Q==", "license": "MIT", + "peer": true, "dependencies": { "@octokit/auth-token": "^6.0.0", "@octokit/graphql": "^9.0.2", diff --git a/github-reporting/README.md b/github-reporting/README.md new file mode 100644 index 0000000..4ad3458 --- /dev/null +++ b/github-reporting/README.md @@ -0,0 +1,180 @@ +# GitHub Reporting + +This tool reads GitHub metrics stored in MongoDB Atlas (written by the [github-metrics](../github-metrics/README.md) tool) and exports them to +CSV files for reporting to external stakeholders. + +## Overview + +The tool exports metrics to three separate CSV files for easy import into Google Sheets: + +- **summary.csv** - Core metrics per date/repo (clones, views, stars, forks, watchers) +- **referrals.csv** - One row per referrer per date/repo +- **top-paths.csv** - One row per path per date/repo + +The Date, Owner, and Repository columns in each file allow you to join/link data across sheets for analysis. + +## Prerequisites + +**Atlas**: + +- An Atlas Database User with read permissions for the **Developer Docs** -> **Project Metrics** project. +- A valid connection string for the cluster above. + +Contact a member of the Developer Docs team to be added to this project and get the connection string. + +**System**: + +- Node.js/npm installed + +## Setup + +1. **Create a `.env` file** + + Create a `.env` file that contains the following: + + ``` + ATLAS_CONNECTION_STRING="yourConnectionString" + ``` + + Replace the placeholder value with your connection string. + + > Note: The `.env` file is in the `.gitignore`, so no worries about accidentally committing credentials. 
2. **Install the dependencies**

   From the root of the directory, run:

   ```
   npm install
   ```

## Usage

The tool supports two invocation methods: direct command-line arguments or a configuration file.

### Method 1: Direct Command-Line Arguments

Use the `export` command with options:

```bash
node --env-file=.env index.js export [options]
```

**Options:**

| Option | Description |
|--------|--------------------------------------------------------------------------------------------------|
| `-s, --start-date <date>` | Start date for the report (ISO format, e.g., 2024-01-01) |
| `-e, --end-date <date>` | End date for the report (ISO format, e.g., 2024-12-31) |
| `-p, --projects <projects...>` | Space-separated list of owner/repo projects (e.g., mongodb/docs mongodb/sample-app-nodejs-mflix) |
| `-o, --output <dir>` | Output directory for CSV files |

**Examples:**

```bash
# Export all metrics from all projects
node --env-file=.env index.js export -o my-report

# Export metrics for a specific date range
node --env-file=.env index.js export -s 2024-01-01 -e 2024-12-31 -o q4-report

# Export metrics for specific projects
node --env-file=.env index.js export -p mongodb/docs mongodb/docs-notebooks -o docs-report

# Combine all options
node --env-file=.env index.js export -s 2024-01-01 -e 2024-03-31 -p mongodb/docs -o q1-docs-report
```

### Method 2: Configuration File

Use the `export-config` command with a JSON configuration file:

```bash
node --env-file=.env index.js export-config <config-file> [options]
```

**Options:**

| Option | Description |
|--------|-------------|
| `-o, --output <dir>` | Output directory for CSV files (overrides config file) |

**Example configuration file (`config.json`):**

```json
{
  "startDate": "2025-01-01",
  "endDate": "2025-12-31",
  "projects": [
    { "owner": "mongodb", "repo": "docs" },
    { "owner": "mongodb", "repo": "docs-notebooks" }
  ],
  "output": "annual-report"
}
```

**Run with config file:**
+```bash +node --env-file=.env index.js export-config config.json +``` + +**Override output directory:** + +```bash +node --env-file=.env index.js export-config config.json -o different-output +``` + +## Output + +The tool creates a directory containing three CSV files: + +``` +my-report/ +├── summary.csv +├── referrals.csv +└── top-paths.csv +``` + +### summary.csv + +| Column | Description | +|--------|-------------------------------------------------| +| Date | ISO timestamp of when metrics were collected | +| Owner | GitHub organization/owner | +| Repository | Repository name | +| Clones | Number of clones in the last 14 days | +| Page Views | Total page views in the last 14 days | +| Unique Views | Unique visitors in the last 14 days | +| Stars | Star count (cumulative total, current count) | +| Forks | Fork count (cumulative total, current count) | +| Watchers | Watcher count (cumulative total, current count) | + +### referrals.csv + +| Column | Description | +|--------|-------------| +| Date | ISO timestamp of when metrics were collected | +| Owner | GitHub organization/owner | +| Repository | Repository name | +| Referrer | Traffic source (e.g., google.com, github.com) | +| Count | Total visits from this referrer | +| Uniques | Unique visitors from this referrer | + +### top-paths.csv + +| Column | Description | +|--------|-------------| +| Date | ISO timestamp of when metrics were collected | +| Owner | GitHub organization/owner | +| Repository | Repository name | +| Path | Path within the repository | +| Count | Total visits to this path | +| Uniques | Unique visitors to this path | + +## Importing to Google Sheets + +1. Create a new Google Sheet +2. Go to **File** → **Import** +3. Upload each CSV file as a separate sheet +4. 
Use the Date, Owner, and Repository columns to create relationships between sheets using VLOOKUP or pivot tables diff --git a/github-reporting/config.json.example b/github-reporting/config.json.example new file mode 100644 index 0000000..205f2fa --- /dev/null +++ b/github-reporting/config.json.example @@ -0,0 +1,10 @@ +{ + "startDate": "2026-01-01", + "endDate": "2026-01-13", + "projects": [ + { "owner": "mongodb", "repo": "sample-app-java-mflix" }, + { "owner": "mongodb", "repo": "sample-app-nodejs-mflix" }, + { "owner": "mongodb", "repo": "sample-app-python-mflix" } + ], + "output": "output" +} \ No newline at end of file diff --git a/github-reporting/index.js b/github-reporting/index.js new file mode 100644 index 0000000..ff48d49 --- /dev/null +++ b/github-reporting/index.js @@ -0,0 +1,127 @@ +#!/usr/bin/env node + +import { Command } from 'commander'; +import { readFileSync } from 'fs'; +import { readMetricsFromAtlas } from './read-from-db.js'; +import { writeMetricsToCsv, generateOutputDir } from './write-csv.js'; + +const program = new Command(); + +program + .name('github-reporting') + .description('Read GitHub metrics from MongoDB Atlas and export to CSV') + .version('1.0.0'); + +// Direct invocation with command-line arguments +program + .command('export') + .description('Export metrics to CSV using command-line arguments') + .option('-s, --start-date ', 'Start date for the report (ISO format, e.g., 2024-01-01)') + .option('-e, --end-date ', 'End date for the report (ISO format, e.g., 2024-12-31)') + .option('-p, --projects ', 'List of owner/repo projects (e.g., mongodb/docs realm/realm-js)') + .option('-o, --output ', 'Output directory for CSV files') + .action(async (options) => { + try { + const dateRange = buildDateRange(options.startDate, options.endDate); + const projects = parseProjects(options.projects); + const outputDir = options.output || generateOutputDir(dateRange); + + console.log('Fetching metrics from MongoDB Atlas...'); + if 
(dateRange.startDate || dateRange.endDate) { + console.log(`Date range: ${dateRange.startDate || 'beginning'} to ${dateRange.endDate || 'now'}`); + } + if (projects.length > 0) { + console.log(`Projects: ${projects.map(p => `${p.owner}/${p.repo}`).join(', ')}`); + } else { + console.log('Projects: all'); + } + + const metrics = await readMetricsFromAtlas(dateRange, projects); + await writeMetricsToCsv(metrics, outputDir); + } catch (error) { + console.error('Error:', error.message); + process.exit(1); + } + }); + +// Config file invocation +program + .command('export-config') + .description('Export metrics to CSV using a configuration file') + .argument('', 'Path to JSON configuration file') + .option('-o, --output ', 'Output directory for CSV files (overrides config file)') + .action(async (configFile, options) => { + try { + const config = loadConfig(configFile); + const dateRange = buildDateRange(config.startDate, config.endDate); + const projects = config.projects || []; + const outputDir = options.output || config.output || generateOutputDir(dateRange); + + console.log('Fetching metrics from MongoDB Atlas...'); + if (dateRange.startDate || dateRange.endDate) { + console.log(`Date range: ${dateRange.startDate || 'beginning'} to ${dateRange.endDate || 'now'}`); + } + if (projects.length > 0) { + console.log(`Projects: ${projects.map(p => `${p.owner}/${p.repo}`).join(', ')}`); + } else { + console.log('Projects: all'); + } + + const metrics = await readMetricsFromAtlas(dateRange, projects); + await writeMetricsToCsv(metrics, outputDir); + } catch (error) { + console.error('Error:', error.message); + process.exit(1); + } + }); + +/** + * Build a date range object from start and end date strings. 
+ */ +function buildDateRange(startDate, endDate) { + const dateRange = {}; + if (startDate) { + dateRange.startDate = startDate; + } + if (endDate) { + dateRange.endDate = endDate; + } + return dateRange; +} + +/** + * Parse project strings (owner/repo format) into project objects. + */ +function parseProjects(projectStrings) { + if (!projectStrings || projectStrings.length === 0) { + return []; + } + + return projectStrings.map(projectStr => { + const parts = projectStr.split('/'); + if (parts.length !== 2) { + throw new Error(`Invalid project format: "${projectStr}". Expected format: owner/repo`); + } + return { owner: parts[0], repo: parts[1] }; + }); +} + +/** + * Load and parse a JSON configuration file. + */ +function loadConfig(configPath) { + try { + const content = readFileSync(configPath, 'utf-8'); + return JSON.parse(content); + } catch (error) { + if (error.code === 'ENOENT') { + throw new Error(`Configuration file not found: ${configPath}`); + } + if (error instanceof SyntaxError) { + throw new Error(`Invalid JSON in configuration file: ${error.message}`); + } + throw error; + } +} + +program.parse(); \ No newline at end of file diff --git a/github-reporting/package-lock.json b/github-reporting/package-lock.json new file mode 100644 index 0000000..be852e9 --- /dev/null +++ b/github-reporting/package-lock.json @@ -0,0 +1,183 @@ +{ + "name": "github-reporting", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "github-reporting", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "commander": "^14.0.2", + "csv-writer": "^1.6.0", + "mongodb": "7.0" + } + }, + "node_modules/@mongodb-js/saslprep": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.4.4.tgz", + "integrity": "sha512-p7X/ytJDIdwUfFL/CLOhKgdfJe1Fa8uw9seJYvdOmnP9JBWGWHW69HkOixXS6Wy9yvGf1MbhcS6lVmrhy4jm2g==", + "license": "MIT", + "dependencies": { + "sparse-bitfield": "^3.0.3" + } + }, 
+ "node_modules/@types/webidl-conversions": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.3.tgz", + "integrity": "sha512-CiJJvcRtIgzadHCYXw7dqEnMNRjhGZlYK05Mj9OyktqV8uVT8fD2BFOB7S1uwBE3Kj2Z+4UyPmFw/Ixgw/LAlA==", + "license": "MIT" + }, + "node_modules/@types/whatwg-url": { + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-13.0.0.tgz", + "integrity": "sha512-N8WXpbE6Wgri7KUSvrmQcqrMllKZ9uxkYWMt+mCSGwNc0Hsw9VQTW7ApqI4XNrx6/SaM2QQJCzMPDEXE058s+Q==", + "license": "MIT", + "dependencies": { + "@types/webidl-conversions": "*" + } + }, + "node_modules/bson": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/bson/-/bson-7.0.0.tgz", + "integrity": "sha512-Kwc6Wh4lQ5OmkqqKhYGKIuELXl+EPYSCObVE6bWsp1T/cGkOCBN0I8wF/T44BiuhHyNi1mmKVPXk60d41xZ7kw==", + "license": "Apache-2.0", + "engines": { + "node": ">=20.19.0" + } + }, + "node_modules/commander": { + "version": "14.0.2", + "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.2.tgz", + "integrity": "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==", + "license": "MIT", + "engines": { + "node": ">=20" + } + }, + "node_modules/csv-writer": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/csv-writer/-/csv-writer-1.6.0.tgz", + "integrity": "sha512-NOx7YDFWEsM/fTRAJjRpPp8t+MKRVvniAg9wQlUKx20MFrPs73WLJhFf5iteqrxNYnsy924K3Iroh3yNHeYd2g==", + "license": "MIT" + }, + "node_modules/memory-pager": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", + "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", + "license": "MIT" + }, + "node_modules/mongodb": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-7.0.0.tgz", + "integrity": 
"sha512-vG/A5cQrvGGvZm2mTnCSz1LUcbOPl83hfB6bxULKQ8oFZauyox/2xbZOoGNl+64m8VBrETkdGCDBdOsCr3F3jg==", + "license": "Apache-2.0", + "dependencies": { + "@mongodb-js/saslprep": "^1.3.0", + "bson": "^7.0.0", + "mongodb-connection-string-url": "^7.0.0" + }, + "engines": { + "node": ">=20.19.0" + }, + "peerDependencies": { + "@aws-sdk/credential-providers": "^3.806.0", + "@mongodb-js/zstd": "^7.0.0", + "gcp-metadata": "^7.0.1", + "kerberos": "^7.0.0", + "mongodb-client-encryption": ">=7.0.0 <7.1.0", + "snappy": "^7.3.2", + "socks": "^2.8.6" + }, + "peerDependenciesMeta": { + "@aws-sdk/credential-providers": { + "optional": true + }, + "@mongodb-js/zstd": { + "optional": true + }, + "gcp-metadata": { + "optional": true + }, + "kerberos": { + "optional": true + }, + "mongodb-client-encryption": { + "optional": true + }, + "snappy": { + "optional": true + }, + "socks": { + "optional": true + } + } + }, + "node_modules/mongodb-connection-string-url": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-7.0.0.tgz", + "integrity": "sha512-irhhjRVLE20hbkRl4zpAYLnDMM+zIZnp0IDB9akAFFUZp/3XdOfwwddc7y6cNvF2WCEtfTYRwYbIfYa2kVY0og==", + "license": "Apache-2.0", + "dependencies": { + "@types/whatwg-url": "^13.0.0", + "whatwg-url": "^14.1.0" + }, + "engines": { + "node": ">=20.19.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/sparse-bitfield": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", + "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", + "license": "MIT", + "dependencies": { + "memory-pager": "^1.0.2" + } + }, 
+ "node_modules/tr46": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz", + "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==", + "license": "MIT", + "dependencies": { + "punycode": "^2.3.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/whatwg-url": { + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz", + "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==", + "license": "MIT", + "dependencies": { + "tr46": "^5.1.0", + "webidl-conversions": "^7.0.0" + }, + "engines": { + "node": ">=18" + } + } + } +} diff --git a/github-reporting/package.json b/github-reporting/package.json new file mode 100644 index 0000000..db52a7a --- /dev/null +++ b/github-reporting/package.json @@ -0,0 +1,27 @@ +{ + "name": "github-reporting", + "version": "1.0.0", + "description": "Pull reports from GitHub data stored in MongoDB Atlas.", + "repository": { + "type": "git", + "url": "github.com/grove-platform/tooling" + }, + "license": "MIT", + "author": "MongoDB Developer Docs", + "type": "module", + "main": "index.js", + "bin": { + "github-reporting": "./index.js" + }, + "scripts": { + "start": "node index.js", + "export": "node index.js export", + "export-config": "node index.js export-config", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "dependencies": { + "commander": "^14.0.2", + "csv-writer": "^1.6.0", + "mongodb": "7.0" + } +} diff --git a/github-reporting/read-from-db.js b/github-reporting/read-from-db.js new file 
mode 100644 index 0000000..b212b79 --- /dev/null +++ b/github-reporting/read-from-db.js @@ -0,0 +1,97 @@ +import { MongoClient } from 'mongodb'; + +/** + * Read metrics from MongoDB Atlas for the given date range and projects. + * @param {Object} dateRange - Object with startDate and endDate properties (ISO strings or Date objects) + * @param {Array} projects - Array of {owner, repo} objects. If empty/null, reads from all collections. + * @returns {Promise} Array of metric documents + */ +async function readMetricsFromAtlas(dateRange, projects) { + const uri = process.env.ATLAS_CONNECTION_STRING; + if (!uri) { + throw new Error('ATLAS_CONNECTION_STRING environment variable is not set'); + } + + const client = new MongoClient(uri); + let metricDocuments = []; + + try { + await client.connect(); + const database = client.db("github_metrics"); + + if (projects && projects.length > 0) { + // Get metrics for specific projects + for (const project of projects) { + const projectMetrics = await getProjectMetrics(dateRange, project, database); + metricDocuments = metricDocuments.concat(projectMetrics); + } + } else { + // Get all data from all collections matching the date range + const collections = await database.listCollections().toArray(); + for (const collInfo of collections) { + // Skip system collections + if (collInfo.name.startsWith('system.')) continue; + + const metrics = await getCollectionMetrics(dateRange, collInfo.name, database); + metricDocuments = metricDocuments.concat(metrics); + } + } + + return metricDocuments; + } finally { + await client.close(); + } +} + +/** + * Get metrics for a specific project (owner/repo combination). 
+ * @param {Object} dateRange - Object with startDate and endDate properties + * @param {Object} project - Object with owner and repo properties + * @param {Db} database - MongoDB database instance + * @returns {Promise} Array of metric documents for this project + */ +async function getProjectMetrics(dateRange, project, database) { + const collName = project.owner + "_" + project.repo; + return getCollectionMetrics(dateRange, collName, database); +} + +/** + * Get metrics from a specific collection within the date range. + * @param {Object} dateRange - Object with startDate and endDate properties + * @param {string} collName - Collection name + * @param {Db} database - MongoDB database instance + * @returns {Promise} Array of metric documents + */ +async function getCollectionMetrics(dateRange, collName, database) { + try { + const coll = database.collection(collName); + + // Build the date filter query + const query = {}; + if (dateRange) { + query.date = {}; + if (dateRange.startDate) { + query.date.$gte = new Date(dateRange.startDate).toISOString(); + } + if (dateRange.endDate) { + query.date.$lte = new Date(dateRange.endDate).toISOString(); + } + // If no date constraints were added, remove the empty date object + if (Object.keys(query.date).length === 0) { + delete query.date; + } + } + + const documents = await coll.find(query).sort({ date: 1 }).toArray(); + return documents; + } catch (err) { + console.error(`There was a problem fetching data from the '${collName}' collection: `, err); + return []; + } +} + +export { + readMetricsFromAtlas, + getProjectMetrics, + getCollectionMetrics, +} \ No newline at end of file diff --git a/github-reporting/write-csv.js b/github-reporting/write-csv.js new file mode 100644 index 0000000..63462ae --- /dev/null +++ b/github-reporting/write-csv.js @@ -0,0 +1,167 @@ +import { createObjectCsvWriter } from 'csv-writer'; +import path from 'path'; +import { mkdirSync } from 'fs'; + +/** + * Write metrics data to three separate 
CSV files: summary, referrals, and top-paths. + * @param {Array} metrics - Array of metric documents from MongoDB + * @param {string} outputDir - Directory path for output CSV files + * @returns {Promise} Object with paths to the created files + */ +async function writeMetricsToCsv(metrics, outputDir) { + if (!metrics || metrics.length === 0) { + console.log('No metrics to write.'); + return null; + } + + // Ensure output directory exists + mkdirSync(outputDir, { recursive: true }); + + const summaryPath = path.join(outputDir, 'summary.csv'); + const referralsPath = path.join(outputDir, 'referrals.csv'); + const topPathsPath = path.join(outputDir, 'top-paths.csv'); + + // Write all three files + await Promise.all([ + writeSummaryCsv(metrics, summaryPath), + writeReferralsCsv(metrics, referralsPath), + writeTopPathsCsv(metrics, topPathsPath), + ]); + + console.log(`\nSuccessfully wrote reports to ${outputDir}/`); + console.log(` - summary.csv (${metrics.length} records)`); + + return { summaryPath, referralsPath, topPathsPath }; +} + +/** + * Write the summary CSV with core metrics (no arrays). 
+ */ +async function writeSummaryCsv(metrics, outputPath) { + const csvWriter = createObjectCsvWriter({ + path: outputPath, + header: [ + { id: 'date', title: 'Date' }, + { id: 'owner', title: 'Owner' }, + { id: 'repo', title: 'Repository' }, + { id: 'clones', title: 'Clones' }, + { id: 'viewCount', title: 'Page Views' }, + { id: 'uniqueViews', title: 'Unique Views' }, + { id: 'stars', title: 'Stars' }, + { id: 'forks', title: 'Forks' }, + { id: 'watchers', title: 'Watchers' }, + ] + }); + + const records = metrics.map(metric => ({ + date: metric.date, + owner: metric.owner, + repo: metric.repo, + clones: metric.clones, + viewCount: metric.viewCount, + uniqueViews: metric.uniqueViews, + stars: metric.stars, + forks: metric.forks, + watchers: metric.watchers, + })); + + await csvWriter.writeRecords(records); +} + +/** + * Write the referrals CSV with one row per referrer per date/repo. + */ +async function writeReferralsCsv(metrics, outputPath) { + const csvWriter = createObjectCsvWriter({ + path: outputPath, + header: [ + { id: 'date', title: 'Date' }, + { id: 'owner', title: 'Owner' }, + { id: 'repo', title: 'Repository' }, + { id: 'referrer', title: 'Referrer' }, + { id: 'count', title: 'Count' }, + { id: 'uniques', title: 'Uniques' }, + ] + }); + + const records = []; + for (const metric of metrics) { + const referralSources = metric.referralSources || []; + for (const source of referralSources) { + records.push({ + date: metric.date, + owner: metric.owner, + repo: metric.repo, + referrer: source.referrer, + count: source.count, + uniques: source.uniques, + }); + } + } + + await csvWriter.writeRecords(records); + console.log(` - referrals.csv (${records.length} records)`); +} + +/** + * Write the top-paths CSV with one row per path per date/repo. 
+ */ +async function writeTopPathsCsv(metrics, outputPath) { + const csvWriter = createObjectCsvWriter({ + path: outputPath, + header: [ + { id: 'date', title: 'Date' }, + { id: 'owner', title: 'Owner' }, + { id: 'repo', title: 'Repository' }, + { id: 'path', title: 'Path' }, + { id: 'count', title: 'Count' }, + { id: 'uniques', title: 'Uniques' }, + ] + }); + + const records = []; + for (const metric of metrics) { + const topPaths = metric.topPaths || []; + for (const pathEntry of topPaths) { + records.push({ + date: metric.date, + owner: metric.owner, + repo: metric.repo, + path: pathEntry.path, + count: pathEntry.count, + uniques: pathEntry.uniques, + }); + } + } + + await csvWriter.writeRecords(records); + console.log(` - top-paths.csv (${records.length} records)`); +} + +/** + * Generate a default output directory name based on date range and timestamp. + * @param {Object} dateRange - Object with startDate and endDate properties + * @returns {string} Generated directory name + */ +function generateOutputDir(dateRange) { + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + let dirname = `github-metrics-${timestamp}`; + + if (dateRange) { + if (dateRange.startDate) { + const start = new Date(dateRange.startDate).toISOString().split('T')[0]; + dirname = `github-metrics-from-${start}`; + } + if (dateRange.endDate) { + const end = new Date(dateRange.endDate).toISOString().split('T')[0]; + dirname += `-to-${end}`; + } + } + + return dirname; +} + +export { + writeMetricsToCsv, + generateOutputDir, +} \ No newline at end of file