Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,8 @@ This is the log of notable changes to EAS CLI and related packages.

### 🎉 New features

- Add `eas observe:metrics` command for monitoring app performance metrics. ([#3401](https://github.com/expo/eas-cli/pull/3401) by [@ubax](https://github.com/ubax))

### 🐛 Bug fixes

### 🧹 Chores
Expand Down
3 changes: 3 additions & 0 deletions packages/eas-cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,9 @@
"metadata": {
"description": "manage store configuration"
},
"observe": {
"description": "monitor app performance metrics"
},
"project": {
"description": "manage project"
},
Expand Down
189 changes: 189 additions & 0 deletions packages/eas-cli/src/commands/observe/__tests__/metrics.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,189 @@
import { Config } from '@oclif/core';

import { ExpoGraphqlClient } from '../../../commandUtils/context/contextUtils/createGraphqlClient';
import { AppPlatform } from '../../../graphql/generated';
import { fetchObserveMetricsAsync } from '../../../observe/fetchMetrics';
import { buildObserveMetricsJson, buildObserveMetricsTable } from '../../../observe/formatMetrics';
import { enableJsonOutput, printJsonOnlyOutput } from '../../../utils/json';
import ObserveMetrics from '../metrics';

// Module mocks. jest.mock calls are hoisted above the imports by babel-jest,
// so the imported symbols below already resolve to these mock implementations.
jest.mock('../../../observe/fetchMetrics', () => ({
  fetchObserveMetricsAsync: jest.fn(),
}));
// Keep the real resolveStatKey/resolveMetricName helpers (via requireActual) so
// alias resolution is exercised for real; only the formatters are stubbed out.
jest.mock('../../../observe/formatMetrics', () => ({
  ...jest.requireActual('../../../observe/formatMetrics'),
  buildObserveMetricsTable: jest.fn().mockReturnValue('table'),
  buildObserveMetricsJson: jest.fn().mockReturnValue([]),
}));
jest.mock('../../../log');
jest.mock('../../../utils/json');

// Typed handles to the auto-mocked functions, for call-argument assertions.
const mockFetchObserveMetricsAsync = jest.mocked(fetchObserveMetricsAsync);
const mockBuildObserveMetricsTable = jest.mocked(buildObserveMetricsTable);
const mockBuildObserveMetricsJson = jest.mocked(buildObserveMetricsJson);
const mockEnableJsonOutput = jest.mocked(enableJsonOutput);
const mockPrintJsonOnlyOutput = jest.mocked(printJsonOnlyOutput);

describe(ObserveMetrics, () => {
const graphqlClient = {} as any as ExpoGraphqlClient;
const mockConfig = {} as unknown as Config;
const projectId = 'test-project-id';

beforeEach(() => {
jest.clearAllMocks();
mockFetchObserveMetricsAsync.mockResolvedValue(new Map());
});

function createCommand(argv: string[]): ObserveMetrics {
const command = new ObserveMetrics(argv, mockConfig);
// @ts-expect-error getContextAsync is a protected method
jest.spyOn(command, 'getContextAsync').mockReturnValue({
projectId,
loggedIn: { graphqlClient },
});
return command;
}

it('fetches metrics with default parameters (both platforms)', async () => {
const now = new Date('2025-06-15T12:00:00.000Z');
jest.useFakeTimers({ now });

const command = createCommand([]);
await command.runAsync();

expect(mockFetchObserveMetricsAsync).toHaveBeenCalledTimes(1);
const platforms = mockFetchObserveMetricsAsync.mock.calls[0][3];
expect(platforms).toEqual([AppPlatform.Android, AppPlatform.Ios]);

jest.useRealTimers();
});

it('queries only Android when --platform android is passed', async () => {
const command = createCommand(['--platform', 'android']);
await command.runAsync();

const platforms = mockFetchObserveMetricsAsync.mock.calls[0][3];
expect(platforms).toEqual([AppPlatform.Android]);
});

it('queries only iOS when --platform ios is passed', async () => {
const command = createCommand(['--platform', 'ios']);
await command.runAsync();

const platforms = mockFetchObserveMetricsAsync.mock.calls[0][3];
expect(platforms).toEqual([AppPlatform.Ios]);
});

it('resolves --metric aliases before passing to fetchObserveMetricsAsync', async () => {
const command = createCommand(['--metric', 'tti', '--metric', 'cold_launch']);
await command.runAsync();

const metricNames = mockFetchObserveMetricsAsync.mock.calls[0][2];
expect(metricNames).toEqual(['expo.app_startup.tti', 'expo.app_startup.cold_launch_time']);
});

it('uses default time range (60 days back) when no --start/--end flags', async () => {
const now = new Date('2025-06-15T12:00:00.000Z');
jest.useFakeTimers({ now });

const command = createCommand([]);
await command.runAsync();

const startTime = mockFetchObserveMetricsAsync.mock.calls[0][4];
const endTime = mockFetchObserveMetricsAsync.mock.calls[0][5];
expect(endTime).toBe('2025-06-15T12:00:00.000Z');
expect(startTime).toBe('2025-04-16T12:00:00.000Z');

jest.useRealTimers();
});

it('uses explicit --start and --end when provided', async () => {
const command = createCommand([
'--start',
'2025-01-01T00:00:00.000Z',
'--end',
'2025-02-01T00:00:00.000Z',
]);
await command.runAsync();

const startTime = mockFetchObserveMetricsAsync.mock.calls[0][4];
const endTime = mockFetchObserveMetricsAsync.mock.calls[0][5];
expect(startTime).toBe('2025-01-01T00:00:00.000Z');
expect(endTime).toBe('2025-02-01T00:00:00.000Z');
});

it('passes resolved --stat flags to buildObserveMetricsTable', async () => {
const command = createCommand(['--stat', 'p90', '--stat', 'count']);
await command.runAsync();

expect(mockBuildObserveMetricsTable).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), [
'p90',
'eventCount',
]);
});

it('deduplicates --stat flags that resolve to the same key', async () => {
const command = createCommand(['--stat', 'med', '--stat', 'median']);
await command.runAsync();

expect(mockBuildObserveMetricsTable).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), [
'median',
]);
});

it('uses --days-from-now to compute start/end time range', async () => {
const now = new Date('2025-06-15T12:00:00.000Z');
jest.useFakeTimers({ now });

const command = createCommand(['--days-from-now', '7']);
await command.runAsync();

const startTime = mockFetchObserveMetricsAsync.mock.calls[0][4];
const endTime = mockFetchObserveMetricsAsync.mock.calls[0][5];
expect(endTime).toBe('2025-06-15T12:00:00.000Z');
expect(startTime).toBe('2025-06-08T12:00:00.000Z');

jest.useRealTimers();
});

it('rejects --days-from-now combined with --start', async () => {
const command = createCommand(['--days-from-now', '7', '--start', '2025-01-01T00:00:00.000Z']);

await expect(command.runAsync()).rejects.toThrow();
});

it('rejects --days-from-now combined with --end', async () => {
const command = createCommand(['--days-from-now', '7', '--end', '2025-02-01T00:00:00.000Z']);

await expect(command.runAsync()).rejects.toThrow();
});

it('uses default stats when --stat is not provided', async () => {
const command = createCommand([]);
await command.runAsync();

expect(mockBuildObserveMetricsTable).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), [
'median',
'eventCount',
]);
});

it('passes resolved --stat flags to buildObserveMetricsJson when --json is used', async () => {
const command = createCommand([
'--json',
'--non-interactive',
'--stat',
'min',
'--stat',
'avg',
]);
await command.runAsync();

expect(mockEnableJsonOutput).toHaveBeenCalled();
expect(mockBuildObserveMetricsJson).toHaveBeenCalledWith(expect.any(Map), expect.any(Array), [
'min',
'average',
]);
expect(mockPrintJsonOnlyOutput).toHaveBeenCalled();
});
});
143 changes: 143 additions & 0 deletions packages/eas-cli/src/commands/observe/metrics.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,143 @@
import { Flags } from '@oclif/core';

import EasCommand from '../../commandUtils/EasCommand';
import { EasNonInteractiveAndJsonFlags } from '../../commandUtils/flags';
import { AppPlatform } from '../../graphql/generated';
import Log from '../../log';
import { fetchObserveMetricsAsync } from '../../observe/fetchMetrics';
import {
StatisticKey,
buildObserveMetricsJson,
buildObserveMetricsTable,
resolveStatKey,
} from '../../observe/formatMetrics';
import { validateDateFlag } from '../../observe/utils';
import { resolveMetricName } from '../../observe/metricNames';
import { enableJsonOutput, printJsonOnlyOutput } from '../../utils/json';

// Metric names queried when the user passes no --metric flag.
const DEFAULT_METRICS = [
  'expo.app_startup.cold_launch_time',
  'expo.app_startup.warm_launch_time',
  'expo.app_startup.tti',
  'expo.app_startup.ttr',
  'expo.app_startup.bundle_load_time',
];

// Look-back window (in days) used when neither --start nor --days-from-now is given.
const DEFAULT_DAYS_BACK = 60;

// Statistics shown when --stat is omitted: the table view stays compact,
// while JSON output includes the full set of statistics.
const TABLE_FORMAT_DEFAULT_STATS: StatisticKey[] = ['median', 'eventCount'];
const JSON_FORMAT_DEFAULT_STATS: StatisticKey[] = [
  'min',
  'median',
  'max',
  'average',
  'p80',
  'p90',
  'p99',
  'eventCount',
];

export default class ObserveMetrics extends EasCommand {
  static override description = 'display app performance metrics grouped by app version';

  static override flags = {
    platform: Flags.enum<'android' | 'ios'>({
      description: 'Filter by platform',
      options: ['android', 'ios'],
    }),
    metric: Flags.string({
      description:
        'Metric name to display (can be specified multiple times). Supports aliases: tti, ttr, cold_launch, warm_launch, bundle_load',
      multiple: true,
    }),
    stat: Flags.string({
      description:
        'Statistic to display per metric (can be specified multiple times). Options: min, max, med, avg, p80, p90, p99, count',
      multiple: true,
    }),
    start: Flags.string({
      description: 'Start of time range for metrics data (ISO date).',
      exclusive: ['days-from-now'],
    }),
    end: Flags.string({
      description: 'End of time range for metrics data (ISO date).',
      exclusive: ['days-from-now'],
    }),
    'days-from-now': Flags.integer({
      description: 'Show metrics from the last N days (mutually exclusive with --start/--end)',
      min: 1,
      exclusive: ['start', 'end'],
    }),
    ...EasNonInteractiveAndJsonFlags,
  };

  static override contextDefinition = {
    ...this.ContextOptions.ProjectId,
    ...this.ContextOptions.LoggedIn,
  };

  /**
   * Fetches observe metrics for the project and prints them either as a table
   * (default) or as JSON (--json). The time range comes from --start/--end,
   * --days-from-now, or a DEFAULT_DAYS_BACK-day look-back window.
   */
  async runAsync(): Promise<void> {
    const { flags } = await this.parse(ObserveMetrics);
    const {
      projectId,
      loggedIn: { graphqlClient },
    } = await this.getContextAsync(ObserveMetrics, {
      nonInteractive: flags['non-interactive'],
    });

    if (flags.json) {
      enableJsonOutput();
    } else {
      Log.warn('EAS Observe is in preview and subject to breaking changes.');
    }

    // Validate whenever the flag was actually supplied. A truthiness check
    // (`if (flags.start)`) would let an explicit empty string (`--start ""`)
    // skip validation and then flow through the `??` fallback below unvalidated,
    // since '' is not nullish. validateDateFlag is presumed to throw on an
    // invalid/empty value — TODO confirm against observe/utils.
    if (flags.start !== undefined) {
      validateDateFlag(flags.start, '--start');
    }
    if (flags.end !== undefined) {
      validateDateFlag(flags.end, '--end');
    }

    // Expand user-supplied aliases (tti, cold_launch, …) to fully-qualified
    // metric names; fall back to the default metric set.
    const metricNames = flags.metric?.length
      ? flags.metric.map(resolveMetricName)
      : DEFAULT_METRICS;

    // Snapshot the clock once so startTime and endTime are derived from the
    // same instant (separate Date.now()/new Date() calls could skew slightly).
    const nowMs = Date.now();
    const msPerDay = 24 * 60 * 60 * 1000;

    let startTime: string;
    let endTime: string;

    const daysFromNow = flags['days-from-now'];
    if (daysFromNow !== undefined) {
      // --days-from-now: window of N days ending now. oclif's `exclusive`
      // declarations reject combining it with --start/--end at parse time.
      endTime = new Date(nowMs).toISOString();
      startTime = new Date(nowMs - daysFromNow * msPerDay).toISOString();
    } else {
      endTime = flags.end ?? new Date(nowMs).toISOString();
      startTime = flags.start ?? new Date(nowMs - DEFAULT_DAYS_BACK * msPerDay).toISOString();
    }

    const platforms: AppPlatform[] = flags.platform
      ? [flags.platform === 'android' ? AppPlatform.Android : AppPlatform.Ios]
      : [AppPlatform.Android, AppPlatform.Ios];

    const metricsMap = await fetchObserveMetricsAsync(
      graphqlClient,
      projectId,
      metricNames,
      platforms,
      startTime,
      endTime
    );

    // Resolve --stat aliases (med → median, count → eventCount, …) and drop
    // duplicates while preserving first-seen order.
    const argumentsStat = flags.stat?.length
      ? Array.from(new Set(flags.stat.map(resolveStatKey)))
      : undefined;

    if (flags.json) {
      const stats: StatisticKey[] = argumentsStat ?? JSON_FORMAT_DEFAULT_STATS;
      printJsonOnlyOutput(buildObserveMetricsJson(metricsMap, metricNames, stats));
    } else {
      const stats: StatisticKey[] = argumentsStat ?? TABLE_FORMAT_DEFAULT_STATS;
      Log.addNewLineIfNone();
      Log.log(buildObserveMetricsTable(metricsMap, metricNames, stats));
    }
  }
}
Loading