diff --git a/CHANGELOG.md b/CHANGELOG.md index 03a8728fc9..3f0f4e4211 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,8 @@ This is the log of notable changes to EAS CLI and related packages. ### ๐ŸŽ‰ New features +- Add `eas observe:metrics` command for monitoring app performance metrics. ([#3401](https://github.com/expo/eas-cli/pull/3401) by [@ubax](https://github.com/ubax)) + ### ๐Ÿ› Bug fixes ### ๐Ÿงน Chores diff --git a/packages/eas-cli/package.json b/packages/eas-cli/package.json index d262a24fc5..b9749de743 100644 --- a/packages/eas-cli/package.json +++ b/packages/eas-cli/package.json @@ -203,6 +203,9 @@ "metadata": { "description": "manage store configuration" }, + "observe": { + "description": "monitor app performance metrics" + }, "project": { "description": "manage project" }, diff --git a/packages/eas-cli/src/commands/observe/__tests__/metrics.test.ts b/packages/eas-cli/src/commands/observe/__tests__/metrics.test.ts new file mode 100644 index 0000000000..c64e06aea6 --- /dev/null +++ b/packages/eas-cli/src/commands/observe/__tests__/metrics.test.ts @@ -0,0 +1,189 @@ +import { Config } from '@oclif/core'; + +import { ExpoGraphqlClient } from '../../../commandUtils/context/contextUtils/createGraphqlClient'; +import { AppPlatform } from '../../../graphql/generated'; +import { fetchObserveMetricsAsync } from '../../../observe/fetchMetrics'; +import { buildObserveMetricsJson, buildObserveMetricsTable } from '../../../observe/formatMetrics'; +import { enableJsonOutput, printJsonOnlyOutput } from '../../../utils/json'; +import ObserveMetrics from '../metrics'; + +jest.mock('../../../observe/fetchMetrics', () => ({ + fetchObserveMetricsAsync: jest.fn(), +})); +jest.mock('../../../observe/formatMetrics', () => ({ + ...jest.requireActual('../../../observe/formatMetrics'), + buildObserveMetricsTable: jest.fn().mockReturnValue('table'), + buildObserveMetricsJson: jest.fn().mockReturnValue([]), +})); +jest.mock('../../../log'); 
+jest.mock('../../../utils/json'); + +const mockFetchObserveMetricsAsync = jest.mocked(fetchObserveMetricsAsync); +const mockBuildObserveMetricsTable = jest.mocked(buildObserveMetricsTable); +const mockBuildObserveMetricsJson = jest.mocked(buildObserveMetricsJson); +const mockEnableJsonOutput = jest.mocked(enableJsonOutput); +const mockPrintJsonOnlyOutput = jest.mocked(printJsonOnlyOutput); + +describe(ObserveMetrics, () => { + const graphqlClient = {} as any as ExpoGraphqlClient; + const mockConfig = {} as unknown as Config; + const projectId = 'test-project-id'; + + beforeEach(() => { + jest.clearAllMocks(); + mockFetchObserveMetricsAsync.mockResolvedValue(new Map()); + }); + + function createCommand(argv: string[]): ObserveMetrics { + const command = new ObserveMetrics(argv, mockConfig); + // @ts-expect-error getContextAsync is a protected method + jest.spyOn(command, 'getContextAsync').mockReturnValue({ + projectId, + loggedIn: { graphqlClient }, + }); + return command; + } + + it('fetches metrics with default parameters (both platforms)', async () => { + const now = new Date('2025-06-15T12:00:00.000Z'); + jest.useFakeTimers({ now }); + + const command = createCommand([]); + await command.runAsync(); + + expect(mockFetchObserveMetricsAsync).toHaveBeenCalledTimes(1); + const platforms = mockFetchObserveMetricsAsync.mock.calls[0][3]; + expect(platforms).toEqual([AppPlatform.Android, AppPlatform.Ios]); + + jest.useRealTimers(); + }); + + it('queries only Android when --platform android is passed', async () => { + const command = createCommand(['--platform', 'android']); + await command.runAsync(); + + const platforms = mockFetchObserveMetricsAsync.mock.calls[0][3]; + expect(platforms).toEqual([AppPlatform.Android]); + }); + + it('queries only iOS when --platform ios is passed', async () => { + const command = createCommand(['--platform', 'ios']); + await command.runAsync(); + + const platforms = mockFetchObserveMetricsAsync.mock.calls[0][3]; + 
expect(platforms).toEqual([AppPlatform.Ios]); + }); + + it('resolves --metric aliases before passing to fetchObserveMetricsAsync', async () => { + const command = createCommand(['--metric', 'tti', '--metric', 'cold_launch']); + await command.runAsync(); + + const metricNames = mockFetchObserveMetricsAsync.mock.calls[0][2]; + expect(metricNames).toEqual(['expo.app_startup.tti', 'expo.app_startup.cold_launch_time']); + }); + + it('uses default time range (60 days back) when no --start/--end flags', async () => { + const now = new Date('2025-06-15T12:00:00.000Z'); + jest.useFakeTimers({ now }); + + const command = createCommand([]); + await command.runAsync(); + + const startTime = mockFetchObserveMetricsAsync.mock.calls[0][4]; + const endTime = mockFetchObserveMetricsAsync.mock.calls[0][5]; + expect(endTime).toBe('2025-06-15T12:00:00.000Z'); + expect(startTime).toBe('2025-04-16T12:00:00.000Z'); + + jest.useRealTimers(); + }); + + it('uses explicit --start and --end when provided', async () => { + const command = createCommand([ + '--start', + '2025-01-01T00:00:00.000Z', + '--end', + '2025-02-01T00:00:00.000Z', + ]); + await command.runAsync(); + + const startTime = mockFetchObserveMetricsAsync.mock.calls[0][4]; + const endTime = mockFetchObserveMetricsAsync.mock.calls[0][5]; + expect(startTime).toBe('2025-01-01T00:00:00.000Z'); + expect(endTime).toBe('2025-02-01T00:00:00.000Z'); + }); + + it('passes resolved --stat flags to buildObserveMetricsTable', async () => { + const command = createCommand(['--stat', 'p90', '--stat', 'count']); + await command.runAsync(); + + expect(mockBuildObserveMetricsTable).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), [ + 'p90', + 'eventCount', + ]); + }); + + it('deduplicates --stat flags that resolve to the same key', async () => { + const command = createCommand(['--stat', 'med', '--stat', 'median']); + await command.runAsync(); + + expect(mockBuildObserveMetricsTable).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), 
[ + 'median', + ]); + }); + + it('uses --days-from-now to compute start/end time range', async () => { + const now = new Date('2025-06-15T12:00:00.000Z'); + jest.useFakeTimers({ now }); + + const command = createCommand(['--days-from-now', '7']); + await command.runAsync(); + + const startTime = mockFetchObserveMetricsAsync.mock.calls[0][4]; + const endTime = mockFetchObserveMetricsAsync.mock.calls[0][5]; + expect(endTime).toBe('2025-06-15T12:00:00.000Z'); + expect(startTime).toBe('2025-06-08T12:00:00.000Z'); + + jest.useRealTimers(); + }); + + it('rejects --days-from-now combined with --start', async () => { + const command = createCommand(['--days-from-now', '7', '--start', '2025-01-01T00:00:00.000Z']); + + await expect(command.runAsync()).rejects.toThrow(); + }); + + it('rejects --days-from-now combined with --end', async () => { + const command = createCommand(['--days-from-now', '7', '--end', '2025-02-01T00:00:00.000Z']); + + await expect(command.runAsync()).rejects.toThrow(); + }); + + it('uses default stats when --stat is not provided', async () => { + const command = createCommand([]); + await command.runAsync(); + + expect(mockBuildObserveMetricsTable).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), [ + 'median', + 'eventCount', + ]); + }); + + it('passes resolved --stat flags to buildObserveMetricsJson when --json is used', async () => { + const command = createCommand([ + '--json', + '--non-interactive', + '--stat', + 'min', + '--stat', + 'avg', + ]); + await command.runAsync(); + + expect(mockEnableJsonOutput).toHaveBeenCalled(); + expect(mockBuildObserveMetricsJson).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), [ + 'min', + 'average', + ]); + expect(mockPrintJsonOnlyOutput).toHaveBeenCalled(); + }); +}); diff --git a/packages/eas-cli/src/commands/observe/metrics.ts b/packages/eas-cli/src/commands/observe/metrics.ts new file mode 100644 index 0000000000..3e5b37c3d5 --- /dev/null +++ b/packages/eas-cli/src/commands/observe/metrics.ts 
@@ -0,0 +1,143 @@ +import { Flags } from '@oclif/core'; + +import EasCommand from '../../commandUtils/EasCommand'; +import { EasNonInteractiveAndJsonFlags } from '../../commandUtils/flags'; +import { AppPlatform } from '../../graphql/generated'; +import Log from '../../log'; +import { fetchObserveMetricsAsync } from '../../observe/fetchMetrics'; +import { + StatisticKey, + buildObserveMetricsJson, + buildObserveMetricsTable, + resolveStatKey, +} from '../../observe/formatMetrics'; +import { validateDateFlag } from '../../observe/utils'; +import { resolveMetricName } from '../../observe/metricNames'; +import { enableJsonOutput, printJsonOnlyOutput } from '../../utils/json'; + +const DEFAULT_METRICS = [ + 'expo.app_startup.cold_launch_time', + 'expo.app_startup.warm_launch_time', + 'expo.app_startup.tti', + 'expo.app_startup.ttr', + 'expo.app_startup.bundle_load_time', +]; + +const DEFAULT_DAYS_BACK = 60; + +const TABLE_FORMAT_DEFAULT_STATS: StatisticKey[] = ['median', 'eventCount']; +const JSON_FORMAT_DEFAULT_STATS: StatisticKey[] = [ + 'min', + 'median', + 'max', + 'average', + 'p80', + 'p90', + 'p99', + 'eventCount', +]; + +export default class ObserveMetrics extends EasCommand { + static override description = 'display app performance metrics grouped by app version'; + + static override flags = { + platform: Flags.enum<'android' | 'ios'>({ + description: 'Filter by platform', + options: ['android', 'ios'], + }), + metric: Flags.string({ + description: + 'Metric name to display (can be specified multiple times). Supports aliases: tti, ttr, cold_launch, warm_launch, bundle_load', + multiple: true, + }), + stat: Flags.string({ + description: + 'Statistic to display per metric (can be specified multiple times). 
Options: min, max, med, avg, p80, p90, p99, count', + multiple: true, + }), + start: Flags.string({ + description: 'Start of time range for metrics data (ISO date).', + exclusive: ['days-from-now'], + }), + end: Flags.string({ + description: 'End of time range for metrics data (ISO date).', + exclusive: ['days-from-now'], + }), + 'days-from-now': Flags.integer({ + description: 'Show metrics from the last N days (mutually exclusive with --start/--end)', + min: 1, + exclusive: ['start', 'end'], + }), + ...EasNonInteractiveAndJsonFlags, + }; + + static override contextDefinition = { + ...this.ContextOptions.ProjectId, + ...this.ContextOptions.LoggedIn, + }; + + async runAsync(): Promise<void> { + const { flags } = await this.parse(ObserveMetrics); + const { + projectId, + loggedIn: { graphqlClient }, + } = await this.getContextAsync(ObserveMetrics, { + nonInteractive: flags['non-interactive'], + }); + + if (flags.json) { + enableJsonOutput(); + } else { + Log.warn('EAS Observe is in preview and subject to breaking changes.'); + } + + if (flags.start) { + validateDateFlag(flags.start, '--start'); + } + if (flags.end) { + validateDateFlag(flags.end, '--end'); + } + + const metricNames = flags.metric?.length + ? flags.metric.map(resolveMetricName) + : DEFAULT_METRICS; + + let startTime: string; + let endTime: string; + + if (flags['days-from-now']) { + endTime = new Date().toISOString(); + startTime = new Date(Date.now() - flags['days-from-now'] * 24 * 60 * 60 * 1000).toISOString(); + } else { + endTime = flags.end ?? new Date().toISOString(); + startTime = + flags.start ?? new Date(Date.now() - DEFAULT_DAYS_BACK * 24 * 60 * 60 * 1000).toISOString(); + } + + const platforms: AppPlatform[] = flags.platform + ? [flags.platform === 'android' ? 
AppPlatform.Android : AppPlatform.Ios] + : [AppPlatform.Android, AppPlatform.Ios]; + + const metricsMap = await fetchObserveMetricsAsync( + graphqlClient, + projectId, + metricNames, + platforms, + startTime, + endTime + ); + + const argumentsStat = flags.stat?.length + ? Array.from(new Set(flags.stat.map(resolveStatKey))) + : undefined; + + if (flags.json) { + const stats: StatisticKey[] = argumentsStat ?? JSON_FORMAT_DEFAULT_STATS; + printJsonOnlyOutput(buildObserveMetricsJson(metricsMap, metricNames, stats)); + } else { + const stats: StatisticKey[] = argumentsStat ?? TABLE_FORMAT_DEFAULT_STATS; + Log.addNewLineIfNone(); + Log.log(buildObserveMetricsTable(metricsMap, metricNames, stats)); + } + } +} diff --git a/packages/eas-cli/src/graphql/queries/ObserveQuery.ts b/packages/eas-cli/src/graphql/queries/ObserveQuery.ts new file mode 100644 index 0000000000..3fa2027487 --- /dev/null +++ b/packages/eas-cli/src/graphql/queries/ObserveQuery.ts @@ -0,0 +1,86 @@ +import gql from 'graphql-tag'; + +import { ExpoGraphqlClient } from '../../commandUtils/context/contextUtils/createGraphqlClient'; +import { withErrorHandlingAsync } from '../client'; +import { + AppObservePlatform, + AppObserveTimeSeriesInput, + AppObserveVersionMarker, +} from '../generated'; + +type AppObserveTimeSeriesQuery = { + app: { + byId: { + id: string; + observe: { + timeSeries: { + versionMarkers: AppObserveVersionMarker[]; + }; + }; + }; + }; +}; + +type AppObserveTimeSeriesQueryVariables = { + appId: string; + input: Pick<AppObserveTimeSeriesInput, 'metricName' | 'platform' | 'startTime' | 'endTime'>; +}; + +export const ObserveQuery = { + async timeSeriesVersionMarkersAsync( + graphqlClient: ExpoGraphqlClient, + { + appId, + metricName, + platform, + startTime, + endTime, + }: { + appId: string; + metricName: string; + platform: AppObservePlatform; + startTime: string; + endTime: string; + } + ): Promise<AppObserveVersionMarker[]> { + const data = await withErrorHandlingAsync( + graphqlClient + .query<AppObserveTimeSeriesQuery, AppObserveTimeSeriesQueryVariables>( + gql` + query AppObserveTimeSeries($appId: String!, $input: AppObserveTimeSeriesInput!) 
{ + app { + byId(appId: $appId) { + id + observe { + timeSeries(input: $input) { + versionMarkers { + appVersion + eventCount + firstSeenAt + statistics { + min + max + median + average + p80 + p90 + p99 + } + } + } + } + } + } + } + `, + { + appId, + input: { metricName, platform, startTime, endTime }, + } + ) + .toPromise() + ); + + return data.app.byId.observe.timeSeries.versionMarkers; + }, +}; diff --git a/packages/eas-cli/src/observe/__tests__/fetchMetrics.test.ts b/packages/eas-cli/src/observe/__tests__/fetchMetrics.test.ts new file mode 100644 index 0000000000..ce7c1c5386 --- /dev/null +++ b/packages/eas-cli/src/observe/__tests__/fetchMetrics.test.ts @@ -0,0 +1,137 @@ +import { AppObservePlatform, AppPlatform } from '../../graphql/generated'; +import { ObserveQuery } from '../../graphql/queries/ObserveQuery'; +import { makeMetricsKey } from '../utils'; +import { fetchObserveMetricsAsync } from '../fetchMetrics'; + +jest.mock('../../graphql/queries/ObserveQuery'); +jest.mock('../../log'); + +const SIMPLE_MARKER = { + __typename: 'AppObserveVersionMarker' as const, + appVersion: '1.0.0', + eventCount: 100, + firstSeenAt: '2025-01-01T00:00:00.000Z', + statistics: { + __typename: 'AppObserveVersionMarkerStatistics' as const, + min: 0.1, + max: 0.5, + median: 0.2, + average: 0.3, + p80: 0.35, + p90: 0.4, + p99: 0.48, + }, +}; + +describe('fetchObserveMetricsAsync', () => { + const mockTimeSeriesMarkers = jest.mocked(ObserveQuery.timeSeriesVersionMarkersAsync); + const mockGraphqlClient = {} as any; + + beforeEach(() => { + mockTimeSeriesMarkers.mockClear(); + }); + + // TODO(@ubax): add support for fetching multiple metrics and platforms in a single query + it('creates queries for each metric+platform combination and assembles metricsMap', async () => { + mockTimeSeriesMarkers + .mockResolvedValueOnce([{ ...SIMPLE_MARKER, eventCount: 100 }]) + .mockResolvedValueOnce([{ ...SIMPLE_MARKER, eventCount: 80 }]); + + const metricsMap = await fetchObserveMetricsAsync( + 
mockGraphqlClient, + 'project-123', + ['expo.app_startup.tti', 'expo.app_startup.cold_launch_time'], + [AppPlatform.Ios], + '2025-01-01T00:00:00.000Z', + '2025-03-01T00:00:00.000Z' + ); + + expect(mockTimeSeriesMarkers).toHaveBeenCalledTimes(2); + expect(mockTimeSeriesMarkers).toHaveBeenNthCalledWith(1, mockGraphqlClient, { + appId: 'project-123', + metricName: 'expo.app_startup.tti', + platform: AppObservePlatform.Ios, + startTime: '2025-01-01T00:00:00.000Z', + endTime: '2025-03-01T00:00:00.000Z', + }); + expect(mockTimeSeriesMarkers).toHaveBeenNthCalledWith(2, mockGraphqlClient, { + appId: 'project-123', + metricName: 'expo.app_startup.cold_launch_time', + platform: AppObservePlatform.Ios, + startTime: '2025-01-01T00:00:00.000Z', + endTime: '2025-03-01T00:00:00.000Z', + }); + + const key = makeMetricsKey('1.0.0', AppPlatform.Ios); + const metricsForVersion = metricsMap.get(key)!; + expect(metricsForVersion.get('expo.app_startup.tti')).toEqual( + expect.objectContaining({ eventCount: 100, min: 0.1, p99: 0.48 }) + ); + expect(metricsForVersion.get('expo.app_startup.cold_launch_time')).toEqual( + expect.objectContaining({ eventCount: 80 }) + ); + }); + + it('creates queries for each platform', async () => { + mockTimeSeriesMarkers.mockResolvedValue([]); + + await fetchObserveMetricsAsync( + mockGraphqlClient, + 'project-123', + ['expo.app_startup.tti'], + [AppPlatform.Ios, AppPlatform.Android], + '2025-01-01T00:00:00.000Z', + '2025-03-01T00:00:00.000Z' + ); + + expect(mockTimeSeriesMarkers).toHaveBeenCalledTimes(2); + expect(mockTimeSeriesMarkers).toHaveBeenNthCalledWith(1, mockGraphqlClient, { + appId: 'project-123', + metricName: 'expo.app_startup.tti', + platform: AppObservePlatform.Ios, + startTime: '2025-01-01T00:00:00.000Z', + endTime: '2025-03-01T00:00:00.000Z', + }); + expect(mockTimeSeriesMarkers).toHaveBeenNthCalledWith(2, mockGraphqlClient, { + appId: 'project-123', + metricName: 'expo.app_startup.tti', + platform: AppObservePlatform.Android, + startTime: 
'2025-01-01T00:00:00.000Z', + endTime: '2025-03-01T00:00:00.000Z', + }); + }); + + it('handles partial failures gracefully - successful queries still populate metricsMap', async () => { + mockTimeSeriesMarkers + .mockResolvedValueOnce([SIMPLE_MARKER]) + .mockRejectedValueOnce(new Error('Unknown metric')); + + const metricsMap = await fetchObserveMetricsAsync( + mockGraphqlClient, + 'project-123', + ['expo.app_startup.tti', 'bad.metric'], + [AppPlatform.Android], + '2025-01-01T00:00:00.000Z', + '2025-03-01T00:00:00.000Z' + ); + + const key = makeMetricsKey('1.0.0', AppPlatform.Android); + expect(metricsMap.get(key)!.has('expo.app_startup.tti')).toBe(true); + expect(metricsMap.get(key)!.has('bad.metric')).toBe(false); + }); + + it('returns empty map when all queries fail', async () => { + mockTimeSeriesMarkers.mockRejectedValue(new Error('Network error')); + + const metricsMap = await fetchObserveMetricsAsync( + mockGraphqlClient, + 'project-123', + ['expo.app_startup.tti'], + [AppPlatform.Ios], + '2025-01-01T00:00:00.000Z', + '2025-03-01T00:00:00.000Z' + ); + + expect(metricsMap.size).toBe(0); + }); +}); diff --git a/packages/eas-cli/src/observe/__tests__/formatMetrics.test.ts b/packages/eas-cli/src/observe/__tests__/formatMetrics.test.ts new file mode 100644 index 0000000000..5a46c3e864 --- /dev/null +++ b/packages/eas-cli/src/observe/__tests__/formatMetrics.test.ts @@ -0,0 +1,297 @@ +import { AppPlatform } from '../../graphql/generated'; +import { + buildObserveMetricsJson, + buildObserveMetricsTable, + resolveStatKey, +} from '../formatMetrics'; +import type { MetricValues, ObserveMetricsMap } from '../metrics.types'; +import { makeMetricsKey } from '../utils'; + +function makeMetricValueWithDefaults(overrides: Partial<MetricValues>): MetricValues { + return { + min: 0.1, + median: 0.3, + max: 1.1, + average: 0.5, + p80: 0.8, + p90: 0.9, + p99: 1.0, + eventCount: 100, + ...overrides, + }; +} + +describe(buildObserveMetricsTable, () => { + it('formats metrics grouped by version 
with metric columns', () => { + const metricsMap: ObserveMetricsMap = new Map(); + const iosKey = makeMetricsKey('1.2.0', AppPlatform.Ios); + metricsMap.set( + iosKey, + new Map([ + [ + 'expo.app_startup.cold_launch_time', + makeMetricValueWithDefaults({ median: 0.35, eventCount: 110 }), + ], + ['expo.app_startup.tti', makeMetricValueWithDefaults({ median: 1.32123, eventCount: 90 })], + ]) + ); + + const androidKey = makeMetricsKey('1.1.0', AppPlatform.Android); + metricsMap.set( + androidKey, + new Map([ + [ + 'expo.app_startup.cold_launch_time', + makeMetricValueWithDefaults({ median: 0.25, eventCount: 120 }), + ], + ['expo.app_startup.tti', makeMetricValueWithDefaults({ median: 1.12111, eventCount: 100 })], + ]) + ); + + const output = buildObserveMetricsTable( + metricsMap, + ['expo.app_startup.cold_launch_time', 'expo.app_startup.tti'], + ['median', 'eventCount'] + ); + + // The header is bolded, thus the escape characters in the snapshot + expect(output).toMatchInlineSnapshot(` +"App Version Platform Cold Launch Med Cold Launch Count TTI Med TTI Count +----------- -------- --------------- ----------------- ------- --------- +1.2.0 iOS 0.35s 110 1.32s 90 +1.1.0 Android 0.25s 120 1.12s 100 " +`); + }); + + it('shows - for metrics with missing values for versions', () => { + const metricsMap: ObserveMetricsMap = new Map(); + const key = makeMetricsKey('2.0.0', AppPlatform.Ios); + metricsMap.set( + key, + new Map([ + [ + 'expo.app_startup.cold_launch_time', + makeMetricValueWithDefaults({ median: 0.25, eventCount: 80 }), + ], + ]) + ); + + const output = buildObserveMetricsTable( + metricsMap, + ['expo.app_startup.cold_launch_time', 'expo.app_startup.tti'], + ['median', 'eventCount'] + ); + + expect(output).toMatchInlineSnapshot(` +"App Version Platform Cold Launch Med Cold Launch Count TTI Med TTI Count +----------- -------- --------------- ----------------- ------- --------- +2.0.0 iOS 0.25s 80 - - " +`); + }); + + it('returns message when no metrics data 
found', () => { + const output = buildObserveMetricsTable( + new Map(), + ['expo.app_startup.cold_launch_time', 'expo.app_startup.tti'], + ['median', 'eventCount'] + ); + expect(output).toMatchInlineSnapshot(`"No metrics data found."`); + }); +}); + +describe(buildObserveMetricsJson, () => { + it('produces JSON with all stats per metric', () => { + const metricsMap: ObserveMetricsMap = new Map(); + const key = makeMetricsKey('1.0.0', AppPlatform.Ios); + metricsMap.set( + key, + new Map([ + ['expo.app_startup.tti', makeMetricValueWithDefaults({ median: 0.12, eventCount: 90 })], + ]) + ); + + const result = buildObserveMetricsJson( + metricsMap, + ['expo.app_startup.tti'], + ['min', 'median', 'max', 'p99'] + ); + + expect(result).toHaveLength(1); + expect(result[0]).toEqual({ + appVersion: '1.0.0', + platform: AppPlatform.Ios, + metrics: { + 'expo.app_startup.tti': { + min: 0.1, + median: 0.12, + max: 1.1, + p99: 1.0, + }, + }, + }); + }); + + it('produces null values for metrics missing from a version that has other metric data', () => { + const metricsMap: ObserveMetricsMap = new Map(); + const key = makeMetricsKey('3.0.0', AppPlatform.Android); + metricsMap.set( + key, + new Map([ + [ + 'expo.app_startup.cold_launch_time', + makeMetricValueWithDefaults({ median: 0.25, eventCount: 80 }), + ], + ]) + ); + + const result = buildObserveMetricsJson( + metricsMap, + ['expo.app_startup.cold_launch_time', 'expo.app_startup.tti'], + ['median', 'eventCount'] + ); + + expect(result[0].metrics).toEqual({ + 'expo.app_startup.cold_launch_time': { + median: 0.25, + eventCount: 80, + }, + 'expo.app_startup.tti': { + median: null, + eventCount: null, + }, + }); + }); +}); + +describe(makeMetricsKey, () => { + it('creates a key from version and platform', () => { + expect(makeMetricsKey('1.0.0', AppPlatform.Ios)).toBe('1.0.0:IOS'); + expect(makeMetricsKey('2.0.0', AppPlatform.Android)).toBe('2.0.0:ANDROID'); + }); +}); + +describe(resolveStatKey, () => { + it('resolves canonical 
stat names', () => { + expect(resolveStatKey('min')).toBe('min'); + expect(resolveStatKey('max')).toBe('max'); + expect(resolveStatKey('median')).toBe('median'); + expect(resolveStatKey('average')).toBe('average'); + expect(resolveStatKey('p80')).toBe('p80'); + expect(resolveStatKey('p90')).toBe('p90'); + expect(resolveStatKey('p99')).toBe('p99'); + expect(resolveStatKey('eventCount')).toBe('eventCount'); + }); + + it('resolves short aliases', () => { + expect(resolveStatKey('med')).toBe('median'); + expect(resolveStatKey('avg')).toBe('average'); + expect(resolveStatKey('count')).toBe('eventCount'); + expect(resolveStatKey('event_count')).toBe('eventCount'); + }); + + it('throws on unknown stat', () => { + expect(() => resolveStatKey('unknown')).toThrow('Unknown statistic: "unknown"'); + }); +}); + +describe('custom stats parameter', () => { + it('table renders only selected stats', () => { + const metricsMap: ObserveMetricsMap = new Map(); + const key = makeMetricsKey('1.0.0', AppPlatform.Ios); + metricsMap.set( + key, + new Map([ + [ + 'expo.app_startup.tti', + { + min: 0.01, + median: 0.1, + max: 0.5, + average: null, + p80: null, + p90: null, + p99: 0.9, + eventCount: 42, + }, + ], + ]) + ); + + const output = buildObserveMetricsTable( + metricsMap, + ['expo.app_startup.tti'], + ['p99', 'eventCount'] + ); + + expect(output).toContain('TTI P99'); + expect(output).toContain('TTI Count'); + expect(output).toContain('0.90s'); + expect(output).toContain('42'); + expect(output).not.toContain('TTI Min'); + expect(output).not.toContain('TTI Med'); + expect(output).not.toContain('TTI Max'); + }); + + it("table formats eventCount as integer without 's' suffix", () => { + const metricsMap: ObserveMetricsMap = new Map(); + const key = makeMetricsKey('1.0.0', AppPlatform.Ios); + metricsMap.set( + key, + new Map([ + [ + 'expo.app_startup.tti', + { + min: 0.01, + median: 0.1, + max: 0.5, + average: null, + p80: null, + p90: null, + p99: null, + eventCount: 100, + }, + ], + ]) 
+ ); + + const output = buildObserveMetricsTable(metricsMap, ['expo.app_startup.tti'], ['eventCount']); + + expect(output).toContain('100'); + expect(output).not.toContain('100s'); + expect(output).not.toContain('100.00s'); + }); + + it('JSON includes only selected stats', () => { + const metricsMap: ObserveMetricsMap = new Map(); + const key = makeMetricsKey('1.0.0', AppPlatform.Ios); + metricsMap.set( + key, + new Map([ + [ + 'expo.app_startup.tti', + { + min: 0.01, + median: 0.1, + max: 0.5, + average: 0.15, + p80: 0.3, + p90: 0.4, + p99: 0.9, + eventCount: 42, + }, + ], + ]) + ); + + const result = buildObserveMetricsJson( + metricsMap, + ['expo.app_startup.tti'], + ['p90', 'eventCount'] + ); + + expect(result[0].metrics['expo.app_startup.tti']).toEqual({ + p90: 0.4, + eventCount: 42, + }); + }); +}); diff --git a/packages/eas-cli/src/observe/__tests__/metricNames.test.ts b/packages/eas-cli/src/observe/__tests__/metricNames.test.ts new file mode 100644 index 0000000000..f989d22d70 --- /dev/null +++ b/packages/eas-cli/src/observe/__tests__/metricNames.test.ts @@ -0,0 +1,52 @@ +import { getMetricDisplayName, resolveMetricName } from '../metricNames'; + +describe(resolveMetricName, () => { + it('resolves short alias "tti" to full metric name', () => { + expect(resolveMetricName('tti')).toBe('expo.app_startup.tti'); + }); + + it('resolves short alias "ttr" to full metric name', () => { + expect(resolveMetricName('ttr')).toBe('expo.app_startup.ttr'); + }); + + it('resolves short alias "cold_launch" to full metric name', () => { + expect(resolveMetricName('cold_launch')).toBe('expo.app_startup.cold_launch_time'); + }); + + it('resolves short alias "warm_launch" to full metric name', () => { + expect(resolveMetricName('warm_launch')).toBe('expo.app_startup.warm_launch_time'); + }); + + it('resolves short alias "bundle_load" to full metric name', () => { + expect(resolveMetricName('bundle_load')).toBe('expo.app_startup.bundle_load_time'); + }); + + it('passes through 
full metric names unchanged', () => { + expect(resolveMetricName('expo.app_startup.tti')).toBe('expo.app_startup.tti'); + expect(resolveMetricName('expo.app_startup.cold_launch_time')).toBe( + 'expo.app_startup.cold_launch_time' + ); + }); + + it('throws on unknown alias', () => { + expect(() => resolveMetricName('unknown_metric')).toThrow('Unknown metric: "unknown_metric"'); + }); + + it('passes through dot-containing custom metric names', () => { + expect(resolveMetricName('custom.metric.name')).toBe('custom.metric.name'); + }); +}); + +describe(getMetricDisplayName, () => { + it('returns short display name for known metrics', () => { + expect(getMetricDisplayName('expo.app_startup.cold_launch_time')).toBe('Cold Launch'); + expect(getMetricDisplayName('expo.app_startup.warm_launch_time')).toBe('Warm Launch'); + expect(getMetricDisplayName('expo.app_startup.tti')).toBe('TTI'); + expect(getMetricDisplayName('expo.app_startup.ttr')).toBe('TTR'); + expect(getMetricDisplayName('expo.app_startup.bundle_load_time')).toBe('Bundle Load'); + }); + + it('returns the full metric name for unknown metrics', () => { + expect(getMetricDisplayName('custom.metric.name')).toBe('custom.metric.name'); + }); +}); diff --git a/packages/eas-cli/src/observe/__tests__/utils.test.ts b/packages/eas-cli/src/observe/__tests__/utils.test.ts new file mode 100644 index 0000000000..1365d3b491 --- /dev/null +++ b/packages/eas-cli/src/observe/__tests__/utils.test.ts @@ -0,0 +1,17 @@ +import { validateDateFlag } from '../utils'; + +describe(validateDateFlag, () => { + it('throws on invalid --start date', () => { + expect(() => validateDateFlag('not-a-date', '--start')).toThrow( + 'Invalid --start date: "not-a-date"' + ); + }); + + it('throws on invalid --end date', () => { + expect(() => validateDateFlag('also-bad', '--end')).toThrow('Invalid --end date: "also-bad"'); + }); + + it('accepts valid ISO date in --start', () => { + expect(() => validateDateFlag('2025-01-01', '--start')).not.toThrow(); + 
}); +}); diff --git a/packages/eas-cli/src/observe/fetchMetrics.ts b/packages/eas-cli/src/observe/fetchMetrics.ts new file mode 100644 index 0000000000..78b54f8dc8 --- /dev/null +++ b/packages/eas-cli/src/observe/fetchMetrics.ts @@ -0,0 +1,91 @@ +import { ExpoGraphqlClient } from '../commandUtils/context/contextUtils/createGraphqlClient'; +import { AppObservePlatform, AppObserveVersionMarker, AppPlatform } from '../graphql/generated'; +import { ObserveQuery } from '../graphql/queries/ObserveQuery'; +import Log from '../log'; +import type { MetricValues, ObserveMetricsMap } from './metrics.types'; +import { makeMetricsKey } from './utils'; + +const appPlatformToObservePlatform: Record<AppPlatform, AppObservePlatform> = { + [AppPlatform.Android]: AppObservePlatform.Android, + [AppPlatform.Ios]: AppObservePlatform.Ios, +}; + +const observePlatformToAppPlatform: Record<AppObservePlatform, AppPlatform> = { + [AppObservePlatform.Android]: AppPlatform.Android, + [AppObservePlatform.Ios]: AppPlatform.Ios, +}; + +interface ObserveQueryResult { + metricName: string; + platform: AppObservePlatform; + markers: AppObserveVersionMarker[]; +} + +export async function fetchObserveMetricsAsync( + graphqlClient: ExpoGraphqlClient, + appId: string, + metricNames: string[], + platforms: AppPlatform[], + startTime: string, + endTime: string +): Promise<ObserveMetricsMap> { + const observeQueries: Promise<ObserveQueryResult | null>[] = []; + + // TODO(@ubax): add support for fetching multiple metrics and platforms in a single query + for (const metricName of metricNames) { + for (const appPlatform of platforms) { + const observePlatform = appPlatformToObservePlatform[appPlatform]; + observeQueries.push( + ObserveQuery.timeSeriesVersionMarkersAsync(graphqlClient, { + appId, + metricName, + platform: observePlatform, + startTime, + endTime, + }) + .then(markers => ({ + metricName, + platform: observePlatform, + markers, + })) + .catch(error => { + Log.warn( + `Failed to fetch observe data for metric "${metricName}" on ${observePlatform}: ${error.message}` + ); + return null; + }) + ); + } + } + + 
const observeResults = await Promise.all(observeQueries); + + const metricsMap: ObserveMetricsMap = new Map(); + + for (const result of observeResults) { + if (!result) { + continue; + } + const { metricName, platform, markers } = result; + const appPlatform = observePlatformToAppPlatform[platform]; + for (const marker of markers) { + const key = makeMetricsKey(marker.appVersion, appPlatform); + if (!metricsMap.has(key)) { + metricsMap.set(key, new Map()); + } + const values: MetricValues = { + min: marker.statistics.min, + max: marker.statistics.max, + median: marker.statistics.median, + average: marker.statistics.average, + p80: marker.statistics.p80, + p90: marker.statistics.p90, + p99: marker.statistics.p99, + eventCount: marker.eventCount, + }; + metricsMap.get(key)!.set(metricName, values); + } + } + + return metricsMap; +} diff --git a/packages/eas-cli/src/observe/formatMetrics.ts b/packages/eas-cli/src/observe/formatMetrics.ts new file mode 100644 index 0000000000..1a834bc73b --- /dev/null +++ b/packages/eas-cli/src/observe/formatMetrics.ts @@ -0,0 +1,143 @@ +import chalk from 'chalk'; + +import { EasCommandError } from '../commandUtils/errors'; +import { AppPlatform } from '../graphql/generated'; +import { appPlatformDisplayNames } from '../platform'; +import { getMetricDisplayName } from './metricNames'; +import { parseMetricsKey } from './utils'; +import type { ObserveMetricsMap } from './metrics.types'; + +export type StatisticKey = + | 'min' + | 'max' + | 'median' + | 'average' + | 'p80' + | 'p90' + | 'p99' + | 'eventCount'; + +export const STAT_ALIASES: Record<string, StatisticKey> = { + min: 'min', + max: 'max', + med: 'median', + median: 'median', + avg: 'average', + average: 'average', + p80: 'p80', + p90: 'p90', + p99: 'p99', + count: 'eventCount', + event_count: 'eventCount', + eventCount: 'eventCount', +}; + +export const STAT_DISPLAY_NAMES: Record<StatisticKey, string> = { + min: 'Min', + max: 'Max', + median: 'Med', + average: 'Avg', + p80: 'P80', + p90: 'P90', + p99: 'P99', + 
eventCount: 'Count', +}; + +/** + * Resolves a user-provided stat alias (e.g. "avg", "med", "count") to a GraphQL-supported StatisticKey. + */ +export function resolveStatKey(input: string): StatisticKey { + const resolved = STAT_ALIASES[input]; + if (resolved) { + return resolved; + } + throw new EasCommandError( + `Unknown statistic: "${input}". Valid options: ${Object.keys(STAT_ALIASES).join(', ')}` + ); +} + +function formatStatValue(stat: StatisticKey, value: number | null | undefined): string { + if (value == null) { + return '-'; + } + if (stat === 'eventCount') { + return String(value); + } + return `${value.toFixed(2)}s`; +} + +export type MetricValuesJson = Partial<Record<StatisticKey, number | null>>; /* only requested stats are present; null = no data */ + +export interface ObserveMetricsVersionResult { + appVersion: string; + platform: AppPlatform; + metrics: Record<string, MetricValuesJson>; +} + +export function buildObserveMetricsJson( + metricsMap: ObserveMetricsMap, + metricNames: string[], + stats: StatisticKey[] +): ObserveMetricsVersionResult[] { + const results: ObserveMetricsVersionResult[] = []; + + for (const [key, versionMetrics] of metricsMap) { + const { appVersion, platform } = parseMetricsKey(key); + + const metrics: Record<string, MetricValuesJson> = {}; + for (const metricName of metricNames) { + const values = versionMetrics.get(metricName); + const statValues: MetricValuesJson = {}; + for (const stat of stats) { + statValues[stat] = values?.[stat] ?? 
null; + } + metrics[metricName] = statValues; + } + + results.push({ appVersion, platform, metrics }); + } + + return results; +} + +export function buildObserveMetricsTable( + metricsMap: ObserveMetricsMap, + metricNames: string[], + stats: StatisticKey[] +): string { + const results = buildObserveMetricsJson(metricsMap, metricNames, stats); + + if (results.length === 0) { + return chalk.yellow('No metrics data found.'); + } + + const fixedHeaders = ['App Version', 'Platform']; + const metricHeaders: string[] = []; + for (const m of metricNames) { + const name = getMetricDisplayName(m); + for (const stat of stats) { + metricHeaders.push(`${name} ${STAT_DISPLAY_NAMES[stat]}`); + } + } + const headers = [...fixedHeaders, ...metricHeaders]; + + const rows: string[][] = results.map(result => { + const metricCells: string[] = []; + for (const m of metricNames) { + const values = result.metrics[m]; + for (const stat of stats) { + metricCells.push(formatStatValue(stat, values?.[stat] ?? null)); + } + } + + return [result.appVersion, appPlatformDisplayNames[result.platform], ...metricCells]; + }); + + const colWidths = headers.map((h, i) => Math.max(h.length, ...rows.map(r => r[i].length))); + + const headerLine = headers.map((h, i) => h.padEnd(colWidths[i])).join(' '); + const separatorLine = colWidths.map(w => '-'.repeat(w)).join(' '); + const dataLines = rows.map(row => row.map((cell, i) => cell.padEnd(colWidths[i])).join(' ')); + + return [chalk.bold(headerLine), separatorLine, ...dataLines].join('\n'); +} diff --git a/packages/eas-cli/src/observe/metricNames.ts b/packages/eas-cli/src/observe/metricNames.ts new file mode 100644 index 0000000000..8d591a3cd2 --- /dev/null +++ b/packages/eas-cli/src/observe/metricNames.ts @@ -0,0 +1,35 @@ +import { EasCommandError } from '../commandUtils/errors'; + +export const METRIC_ALIASES: Record<string, string> = { + tti: 'expo.app_startup.tti', + ttr: 'expo.app_startup.ttr', + cold_launch: 'expo.app_startup.cold_launch_time', + warm_launch: 
'expo.app_startup.warm_launch_time', + bundle_load: 'expo.app_startup.bundle_load_time', +}; + +const KNOWN_FULL_NAMES = new Set(Object.values(METRIC_ALIASES)); + +export const METRIC_SHORT_NAMES: Record<string, string> = { + 'expo.app_startup.cold_launch_time': 'Cold Launch', + 'expo.app_startup.warm_launch_time': 'Warm Launch', + 'expo.app_startup.tti': 'TTI', + 'expo.app_startup.ttr': 'TTR', + 'expo.app_startup.bundle_load_time': 'Bundle Load', +}; + +export function resolveMetricName(input: string): string { + if (METRIC_ALIASES[input]) { + return METRIC_ALIASES[input]; + } + if (KNOWN_FULL_NAMES.has(input) || input.includes('.')) { + return input; + } + throw new EasCommandError( + `Unknown metric: "${input}". Use a full metric name (e.g. expo.app_startup.tti) or a short alias: ${Object.keys(METRIC_ALIASES).join(', ')}` + ); +} + +export function getMetricDisplayName(metricName: string): string { + return METRIC_SHORT_NAMES[metricName] ?? metricName; +} diff --git a/packages/eas-cli/src/observe/metrics.types.ts b/packages/eas-cli/src/observe/metrics.types.ts new file mode 100644 index 0000000000..58e1577a8c --- /dev/null +++ b/packages/eas-cli/src/observe/metrics.types.ts @@ -0,0 +1,21 @@ +import type { AppPlatform } from '../graphql/generated'; + +export interface MetricValues { + min: number | null | undefined; + max: number | null | undefined; + median: number | null | undefined; + average: number | null | undefined; + p80: number | null | undefined; + p90: number | null | undefined; + p99: number | null | undefined; + eventCount: number | null | undefined; +} + +/** + * ObserveMetricsKey encodes an app version + platform pair into a single string key. 
+ * This is needed because the observe API returns metrics per (version, platform) combination, + * and we use a flat Map keyed by that combined string. + */ +export type ObserveMetricsKey = `${string}:${AppPlatform}`; + +export type ObserveMetricsMap = Map<ObserveMetricsKey, Map<string, MetricValues>>; diff --git a/packages/eas-cli/src/observe/utils.ts b/packages/eas-cli/src/observe/utils.ts new file mode 100644 index 0000000000..081aa568b9 --- /dev/null +++ b/packages/eas-cli/src/observe/utils.ts @@ -0,0 +1,27 @@ +import { EasCommandError } from '../commandUtils/errors'; +import { AppPlatform } from '../graphql/generated'; +import type { ObserveMetricsKey } from './metrics.types'; + +export function makeMetricsKey(appVersion: string, platform: AppPlatform): ObserveMetricsKey { + return `${appVersion}:${platform}`; +} + +export function parseMetricsKey(key: ObserveMetricsKey): { + appVersion: string; + platform: AppPlatform; +} { + const lastColon = key.lastIndexOf(':'); /* split on the LAST ':' so the version itself may contain colons */ + return { + appVersion: key.slice(0, lastColon), + platform: key.slice(lastColon + 1) as AppPlatform, + }; +} + +export function validateDateFlag(value: string, flagName: string): void { + const parsed = new Date(value); + if (isNaN(parsed.getTime())) { + throw new EasCommandError( + `Invalid ${flagName} date: "${value}". Provide a valid ISO 8601 date (e.g. 2025-01-01).` + ); + } +}