diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml index e40e0ff..e4f909c 100644 --- a/.github/workflows/ci-test.yml +++ b/.github/workflows/ci-test.yml @@ -8,9 +8,20 @@ on: jobs: build: runs-on: ubuntu-latest - if: "! contains(toJSON(github.event.pull_request.labels.*.name), 'ci-skip')" + if: ${{ !contains(toJSON(github.event.pull_request.labels.*.name), 'ci-skip') }} timeout-minutes: 10 + services: + redis: + image: redis:6 + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + steps: - uses: actions/checkout@v2 - name: Use Node.js ${{ matrix.node-version }} @@ -34,3 +45,5 @@ jobs: env: NODE_ENV: test CI: true + REDIS_HOST: localhost + REDIS_PORT: 6379 diff --git a/src/FastCache.spec.ts b/src/FastCache.spec.ts index a1e10a9..cf9c8f6 100644 --- a/src/FastCache.spec.ts +++ b/src/FastCache.spec.ts @@ -274,4 +274,171 @@ describe('FastCache', () => { }); }); }); + + // Boundary value tests and overflow tests + describe('boundary and overflow tests', () => { + test('should handle empty string keys', async () => { + await cache.set('', 'empty-key-value'); + const value = await cache.get(''); + expect(value).toBe('empty-key-value'); + }); + + test('should handle very long keys', async () => { + const longKey = 'a'.repeat(10000); // 10K character key + await cache.set(longKey, 'long-key-value'); + const value = await cache.get(longKey); + expect(value).toBe('long-key-value'); + }); + + test('should handle very long values', async () => { + const longValue = 'a'.repeat(1000000); // 1M character value + await cache.set('longValue', longValue); + const value = await cache.get('longValue'); + expect(value).toBe(longValue); + }); + + test('should handle setting null and undefined values', async () => { + await cache.set('nullValue', null as any); + const nullValue = await cache.get('nullValue'); + expect(nullValue).toBe(''); + + await cache.set('undefinedValue', undefined as any); + const undefinedValue = await cache.get('undefinedValue'); + expect(undefinedValue).toBe(''); + }); + + test('should handle setting and retrieving special characters', async () => { + const specialChars = '!@#$%^&*()_+{}[]|\\:;"\'<>,.?/~`'; + await cache.set('specialChars', specialChars); + const value = await cache.get('specialChars'); + expect(value).toBe(specialChars); + }); + + test('should handle setting and retrieving emoji', async () => { + const emoji = '๐Ÿ˜€๐Ÿ™Œ๐Ÿ‘๐ŸŽ‰๐Ÿ”ฅ๐Ÿš€'; + await cache.set('emoji', emoji); + const value = await cache.get('emoji'); + expect(value).toBe(emoji); + }); + + test('should handle JSON serialization errors', async () => { + // Create object with circular reference + const circularObj: any = { key: 'value' }; + circularObj.self = circularObj; + + // Verify that withCache handles serialization errors gracefully + const result = await cache.withCache('circularObj', async () => { + return 'fallback value'; + }); + + expect(result).toBe('fallback value'); + }); + + test('should handle invalid JSON when deserializing', async () => { + // Directly set invalid JSON + await client.set('invalidJson', '{invalid"json:data}'); + + // Try to get via cache + const result = await cache.get('invalidJson'); + + // We expect a string return since it couldn't be parsed + expect(result).toBe('{invalid"json:data}'); + }); + + test('should handle concurrent operations on the same key', async () => { + // Create multiple promises that try to set the same key + const promises: Promise[] = []; + for (let i = 0; i < 10; i++) { + promises.push(cache.set('concurrent', `value-${i}`)); + } + + // Wait for all promises to resolve + await Promise.all(promises); + + // Get the final value + const finalValue = await cache.get('concurrent'); + expect(finalValue).toBeDefined(); + }); + + test('should handle extremely large list operations', async () => { + const list = cache.list('largeList'); + const large = 10000; + + // Add many items + for (let i = 0; i < large; i++) { + await list.push(`item-${i}`); + } + + // Check length + const length = await list.length(); + expect(length).toBe(large); + + // Check some values + const items = await list.getAll(large - 5, large - 1); + expect(items.length).toBe(5); + expect(items[0]).toBe(`item-${large - 5}`); + }); + + test('should handle extremely large map operations', async () => { + const map = cache.map('largeMap'); + const large = 1000; + + // Add many key-value pairs + for (let i = 0; i < large; i++) { + await map.set(`key-${i}`, `value-${i}`); + } + + // Check length + const length = await map.length(); + expect(length).toBe(large); + + // Check some values + const fields = Array.from({ length: 5 }, (_, i) => `key-${i}`); + const values = await map.getAll(fields); + expect(values.length).toBe(5); + expect(values[0]).toBe('value-0'); + }); + + test('should handle extremely large set operations', async () => { + const set = cache.setOf('largeSet'); + const large = 1000; + + // Add many items in batches + const batchSize = 100; + for (let i = 0; i < large; i += batchSize) { + const batch = Array.from({ length: batchSize }, (_, j) => `item-${i + j}`); + await set.add(...batch); + } + + // Check length + const length = await set.length(); + expect(length).toBe(large); + + // Check some values + const containsFirst = await set.contains('item-0'); + expect(containsFirst).toBeTruthy(); + + const containsLast = await set.contains(`item-${large - 1}`); + expect(containsLast).toBeTruthy(); + }); + + test('should handle flush with extremely large number of keys', async () => { + // Create many keys with the same prefix + const keyCount = 1000; + const prefix = 'massive-flush-test:'; + + for (let i = 0; i < keyCount; i++) { + await cache.set(`${prefix}${i}`, `value-${i}`); + } + + // Flush all keys with pattern + await cache.flush(`${prefix}*`); + + // Verify keys are gone + for (let i = 0; i < 10; i++) { + const value = await cache.get(`${prefix}${i}`); + expect(value).toBeNull(); + } + }); + }); }); diff --git a/src/InMemoryCache.spec.ts b/src/InMemoryCache.spec.ts index ff9d815..d0c788b 100644 --- a/src/InMemoryCache.spec.ts +++ b/src/InMemoryCache.spec.ts @@ -169,4 +169,127 @@ describe('LocalCache', () => { expect(data).toBeUndefined(); }); }); + + // Boundary value tests and overflow tests + describe('boundary and overflow tests', () => { + it('should handle zero TTL value', async () => { + const zeroTtlCache = new InMemoryCache({ ttlInSec: 0 }); + zeroTtlCache.setCache('foo', { foo: 123 }); + + // Data should be immediately invalidated with zero TTL + await setTimeout(10); + + const data = zeroTtlCache.getCache('foo'); + expect(data).toBeUndefined(); + }); + + it('should handle negative TTL value by treating it as zero', async () => { + const negativeTtlCache = new InMemoryCache({ ttlInSec: -1 }); + negativeTtlCache.setCache('foo', { foo: 123 }); + + // Data should be immediately invalidated with negative TTL + await setTimeout(10); + + const data = negativeTtlCache.getCache('foo'); + expect(data).toBeUndefined(); + }); + + it('should handle extremely large TTL value', async () => { + // ์‹ค์ œ MAX_SAFE_INTEGER๋Š” ๋„ˆ๋ฌด ์ปค์„œ ํ…Œ์ŠคํŠธํ•˜๊ธฐ ์–ด๋ ค์›€ + // ๋Œ€์‹  10์ดˆ ์ •๋„๋กœ ์ถฉ๋ถ„ํžˆ ๊ธด TTL์„ ์‚ฌ์šฉ + const largeTtlCache = new InMemoryCache({ ttlInSec: 10 }); + largeTtlCache.setCache('foo', { foo: 123 }); + + // ์—ฌ๊ธฐ์„œ๋Š” 10์ดˆ๋ณด๋‹ค ํ›จ์”ฌ ์งง์€ ์‹œ๊ฐ„ ํ›„์— ํ™•์ธ + await setTimeout(10); + + const data = largeTtlCache.getCache('foo'); + expect(data).toEqual({ foo: 123 }); + }); + + it('should correctly handle hit counter overflow', async () => { + const hitOverflowCache = new InMemoryCache({ ttlInSec: 10 }); + hitOverflowCache.setCache('foo', { foo: 123 }); + + // ๋กœ์ง ๊ฒ€์ฆ: InMemoryCache.ts๋Š” Number.MAX_VALUE์™€ ๋น„๊ต + hitOverflowCache.totalHit = Number.MAX_VALUE - 1; + + // Get cache to increment hit counter + hitOverflowCache.getCache('foo'); + + // ๋น„๋™๊ธฐ ํ˜ธ์ถœ์ด ์™„๋ฃŒ๋˜๋„๋ก ์•ฝ๊ฐ„์˜ ์ง€์—ฐ + await setTimeout(10); + + // ์‹ค์ œ ๊ตฌํ˜„์—์„œ๋Š” totalHit์ด 0์œผ๋กœ ๋ฆฌ์…‹๋˜๊ณ  hitCarry๊ฐ€ 1 ์ฆ๊ฐ€ + expect(hitOverflowCache.totalHit).toBe(0); + expect(hitOverflowCache.hitCarry).toBe(1); + }); + + it('should handle very long keys', async () => { + const longKey = 'a'.repeat(1000000); // 1 million chars + localCache.setCache(longKey, { value: 'test' }); + + await setTimeout(10); + + const data = localCache.getCache(longKey); + expect(data).toEqual({ value: 'test' }); + }); + + it('should handle storing very large objects', async () => { + // Create large object with deep nesting + const generateLargeObject = (depth: number, breadth: number): any => { + if (depth <= 0) { + return 'leaf'; + } + + const obj: Record = {}; + for (let i = 0; i < breadth; i++) { + obj[`key${i}`] = generateLargeObject(depth - 1, breadth); + } + return obj; + }; + + const largeObject = generateLargeObject(10, 5); + localCache.setCache('largeObj', largeObject); + + await setTimeout(10); + + const data = localCache.getCache('largeObj'); + expect(data).toEqual(largeObject); + }); + + it('should handle circular references gracefully', async () => { + const circularObj: any = { value: 1 }; + circularObj.self = circularObj; // Create circular reference + + expect(() => { + localCache.setCache('circularObj', circularObj); + }).not.toThrow(); + }); + + it('should handle multiple rapid cache operations', async () => { + // Perform lots of operations in quick succession + for (let i = 0; i < 1000; i++) { + localCache.setCache(`key${i}`, { value: i }); + } + + // Validate some random values + for (let i = 100; i < 110; i++) { + expect(localCache.getCache(`key${i}`)).toEqual({ value: i }); + } + + // Validate cache size + expect(InMemoryCache.snip(localCache).itemCount).toBe(1000); + }); + + it('should handle function that throws exception', async () => { + const throwingFn = () => { + throw new Error('Expected function error'); + }; + + expect(() => { + localCache.setCache('throwingFn', throwingFn); + }).toThrow('Expected function error'); + }); + }); }); diff --git a/src/RedisCache.spec.ts b/src/RedisCache.spec.ts index e5feb1f..fe216cd 100644 --- a/src/RedisCache.spec.ts +++ b/src/RedisCache.spec.ts @@ -241,4 +241,214 @@ describe('RedisCache', () => { } }); }); + + // Boundary value tests and overflow tests + describe('boundary and overflow tests', () => { + test('should handle empty string keys', async () => { + await cache.set('', 'empty-key-value'); + const value = await cache.get(''); + expect(value).toBe('empty-key-value'); + }); + + test('should handle very long keys', async () => { + const longKey = 'a'.repeat(10000); // 10K character key + await cache.set(longKey, 'long-key-value'); + const value = await cache.get(longKey); + expect(value).toBe('long-key-value'); + }); + + test('should handle very long values', async () => { + const longValue = 'a'.repeat(1000000); // 1M character value + await cache.set('longValue', longValue); + const value = await cache.get('longValue'); + expect(value).toBe(longValue); + }); + + test('should handle setting null and undefined values', async () => { + await cache.set('nullValue', null as any); + const nullValue = await cache.get('nullValue'); + expect(nullValue).toBe(''); + + await cache.set('undefinedValue', undefined as any); + const undefinedValue = await cache.get('undefinedValue'); + expect(undefinedValue).toBe(''); + }); + + test('should handle setting and retrieving special characters', async () => { + const specialChars = '!@#$%^&*()_+{}[]|\\:;"\'<>,.?/~`'; + await cache.set('specialChars', specialChars); + const value = await cache.get('specialChars'); + expect(value).toBe(specialChars); + }); + + test('should handle setting and retrieving emoji', async () => { + const emoji = '๐Ÿ˜€๐Ÿ™Œ๐Ÿ‘๐ŸŽ‰๐Ÿ”ฅ๐Ÿš€'; + await cache.set('emoji', emoji); + const value = await cache.get('emoji'); + expect(value).toBe(emoji); + }); + + test('should handle JSON serialization errors', async () => { + // Create object with circular reference + const circularObj: any = { key: 'value' }; + circularObj.self = circularObj; + + // Verify that withCache handles serialization errors gracefully + const result = await cache.withCache('circularObj', async () => { + return 'fallback value'; + }); + + expect(result).toBe('fallback value'); + }); + + test('should handle setting zero expiration time', async () => { + // This test checks the behavior when setting cache with zero TTL + await cache.set('zeroTtl', 'should-expire-immediately', 0); + + // Small delay to allow expiration + await new Promise((resolve) => setTimeout(resolve, 10)); + + const value = await cache.get('zeroTtl'); + expect(value).toBeNull(); + }); + + test('should handle negative expiration time', async () => { + // This test checks the behavior when setting cache with negative TTL + await cache.set('negativeTtl', 'should-expire-immediately', -1); + + // Small delay to allow expiration + await new Promise((resolve) => setTimeout(resolve, 10)); + + const value = await cache.get('negativeTtl'); + expect(value).toBeNull(); + }); + + test('should handle Redis connection failure', async () => { + // ์—๋Ÿฌ๋ฅผ ๊ฐ์ง€ํ•˜๋Š” ๋ฆฌ์Šค๋„ˆ ์„ค์ • + const errorListener = jest.fn(); + + // Redis ์—๋Ÿฌ ์ด๋ฒคํŠธ๋ฅผ ๋ฐœ์ƒ์‹œํ‚ค๋Š” ํ…Œ์ŠคํŠธ์šฉ ํด๋ž˜์Šค + class ErrorEmittingRedis extends Redis { + constructor() { + super(); + // ์—๋Ÿฌ ์ด๋ฒคํŠธ๋ฅผ ์ •์ƒ์ ์œผ๋กœ ๊ตฌ๋…ํ•˜์—ฌ unhandled error ๋ฐฉ์ง€ + this.on('error', errorListener); + // ์ธ์Šคํ„ด์Šค ์ƒ์„ฑ ํ›„ ์—๋Ÿฌ ๋ฐœ์ƒ์‹œํ‚ค๊ธฐ (process.nextTick์œผ๋กœ ๋น„๋™๊ธฐ ์‹คํ–‰) + process.nextTick(() => { + this.emit('error', new Error('Test connection error')); + }); + } + } + + // ์—๋Ÿฌ ๋ฐœ์ƒํ•˜๋Š” Redis ํด๋ผ์ด์–ธํŠธ๋กœ ์บ์‹œ ์ƒ์„ฑ + const failingClientCache = RedisCache.create({ + createRedisClient: () => new ErrorEmittingRedis(), + }); + + // ์•ฝ๊ฐ„์˜ ์ง€์—ฐ ํ›„ ๊ฒ€์ฆ (์—๋Ÿฌ ์ด๋ฒคํŠธ๊ฐ€ ๋ฐœ์ƒํ•  ์‹œ๊ฐ„ ํ•„์š”) + await new Promise((resolve) => setTimeout(resolve, 50)); + + // ์—๋Ÿฌ ๋ฆฌ์Šค๋„ˆ๊ฐ€ ํ˜ธ์ถœ๋˜์—ˆ๋Š”์ง€ ํ™•์ธ + expect(errorListener).toHaveBeenCalled(); + + try { + // ์—ฐ๊ฒฐ ์‹คํŒจ ํ›„์—๋„ ์ž‘์—… ์‹œ๋„ + await failingClientCache.set('test-key', 'test-value'); + // ์—ฌ๊ธฐ์„œ ์˜ˆ์™ธ๊ฐ€ ๋ฐœ์ƒํ•˜์ง€ ์•Š๋Š”๋‹ค๋ฉด, ๋ผ์ด๋ธŒ๋Ÿฌ๋ฆฌ ๋‚ด๋ถ€์—์„œ ์—๋Ÿฌ๋ฅผ ์ ์ ˆํžˆ ์ฒ˜๋ฆฌํ•œ๋‹ค๋Š” ์˜๋ฏธ + expect(true).toBe(true); + } catch (error) { + // ์˜ˆ์™ธ๊ฐ€ ๋ฐœ์ƒํ•˜๋”๋ผ๋„ ํ…Œ์ŠคํŠธ๋Š” ์‹คํŒจํ•˜์ง€ ์•Š๊ณ  ์˜ˆ์™ธ๋ฅผ ๊ธฐ๋ก + expect(error).toBeDefined(); + } finally { + // ์ •๋ฆฌ + failingClientCache.destroy(); + } + }); + + test('should handle concurrent list operations', async () => { + const list = cache.list('concurrentList'); + + // Multiple concurrent push operations + const pushPromises: Promise[] = []; + for (let i = 0; i < 100; i++) { + pushPromises.push(list.push(`item-${i}`)); + } + + await Promise.all(pushPromises); + + // Check length is correct + const length = await list.length(); + expect(length).toBe(100); + }); + + test('should handle concurrent map operations', async () => { + const map = cache.map('concurrentMap'); + + // Multiple concurrent set operations + const setPromises: Promise[] = []; + for (let i = 0; i < 100; i++) { + setPromises.push(map.set(`key-${i}`, `value-${i}`)); + } + + await Promise.all(setPromises); + + // Check length is correct + const length = await map.length(); + expect(length).toBe(100); + }); + + test('should handle concurrent set operations', async () => { + const set = cache.setOf('concurrentSet'); + + // Multiple concurrent add operations + const addPromises: Promise[] = []; + for (let i = 0; i < 100; i++) { + addPromises.push(set.add(`item-${i}`)); + } + + await Promise.all(addPromises); + + // Check length is correct + const length = await set.length(); + expect(length).toBe(100); + }); + + test('should handle complex nested data structures', async () => { + // Create a complex object with nested arrays and objects + const complexData = { + array: [1, 2, 3, { key: 'value' }], + object: { + nested: { + deeply: { + value: 42, + array: [5, 6, 7], + }, + }, + }, + mixed: [{ a: 1 }, { b: 2 }, [1, 2, 3]], + }; + + // Test with withCache to check serialization/deserialization + const result = await cache.withCache('complexData', async () => { + return complexData; + }); + + // Result should match the original data structure + expect(JSON.stringify(result)).toBe(JSON.stringify(complexData)); + }); + + test('should handle extremely large cacheKey input', async () => { + // Create a large object + const largeObj: Record = {}; + for (let i = 0; i < 10000; i++) { + largeObj[`key${i}`] = `value${i}`; + } + + // Generate cache key shouldn't throw + expect(() => { + const key = cache.cacheKey(largeObj); + expect(typeof key).toBe('string'); + }).not.toThrow(); + }); + }); });