Redis & Advanced Caching

Testing and Debugging Cache

18 min Lesson 29 of 30

Testing and Debugging Cache

Proper testing and debugging of caching systems is crucial for ensuring reliability, performance, and data consistency. Let's explore comprehensive testing strategies and debugging techniques for Redis-based caching.

Unit Testing Cached Code

Write unit tests for your caching logic using Jest and a mocked Redis client:

// Install testing dependencies
// npm install --save-dev jest redis-mock

// __tests__/cacheService.test.js
//
// Unit tests for CacheService. Redis is replaced with an in-memory mock so
// the suite runs without a live server, and every test starts from a clean
// cache and zeroed metrics.
const CacheService = require('../src/services/cacheService');

// Mock the Redis client module.
// NOTE: jest hoists jest.mock() calls above the imports, so the factory is
// not allowed to reference out-of-scope variables (jest throws "The module
// factory of jest.mock() is not allowed to reference any out-of-scope
// variables"). Require the mock library inside the factory instead of
// binding it to a top-level const.
jest.mock('../src/config/redis', () => require('redis-mock').createClient());

describe('CacheService', () => {
  let cacheService;

  beforeEach(() => {
    cacheService = new CacheService();
    cacheService.resetMetrics();
  });

  afterEach(async () => {
    // Clear all keys after each test so cached state cannot leak between tests.
    const redis = require('../src/config/redis');
    const keys = await redis.keys('*');
    if (keys.length > 0) {
      await redis.del(keys);
    }
  });

  describe('get', () => {
    test('should return cached value on cache hit', async () => {
      const key = 'test:key';
      const value = { id: 1, name: 'Test' };

      // Seed the cache directly.
      await cacheService.set(key, value);

      // On a hit the fetcher must never run.
      const fetcher = jest.fn();
      const result = await cacheService.get(key, fetcher);

      expect(result).toEqual(value);
      expect(fetcher).not.toHaveBeenCalled();
      expect(cacheService.getMetrics().hits).toBe(1);
    });

    test('should call fetcher on cache miss', async () => {
      const key = 'test:miss';
      const value = { id: 2, name: 'Fetched' };

      const fetcher = jest.fn().mockResolvedValue(value);
      const result = await cacheService.get(key, fetcher);

      expect(result).toEqual(value);
      expect(fetcher).toHaveBeenCalledTimes(1);
      expect(cacheService.getMetrics().misses).toBe(1);
    });

    test('should cache fetched value', async () => {
      const key = 'test:fetch';
      const value = { id: 3, name: 'Cached' };

      const fetcher = jest.fn().mockResolvedValue(value);

      // First call - cache miss
      await cacheService.get(key, fetcher);
      expect(fetcher).toHaveBeenCalledTimes(1);

      // Second call - cache hit
      await cacheService.get(key, fetcher);
      expect(fetcher).toHaveBeenCalledTimes(1); // Not called again
    });

    test('should handle errors gracefully', async () => {
      const key = 'test:error';
      const value = { id: 4, name: 'Fallback' };

      // Simulate a Redis failure on the next read; the service should fall
      // back to the fetcher rather than propagate the error.
      const redis = require('../src/config/redis');
      jest.spyOn(redis, 'get').mockRejectedValueOnce(new Error('Redis error'));

      const fetcher = jest.fn().mockResolvedValue(value);
      const result = await cacheService.get(key, fetcher);

      expect(result).toEqual(value);
      expect(fetcher).toHaveBeenCalled();
      expect(cacheService.getMetrics().errors).toBe(1);
    });
  });

  describe('delPattern', () => {
    test('should delete keys matching pattern', async () => {
      await cacheService.set('products:1', { id: 1 });
      await cacheService.set('products:2', { id: 2 });
      await cacheService.set('users:1', { id: 1 });

      const count = await cacheService.delPattern('products:*');

      expect(count).toBe(2);

      // Only the matching namespace should be gone.
      const redis = require('../src/config/redis');
      const products1 = await redis.get('products:1');
      const users1 = await redis.get('users:1');

      expect(products1).toBeNull();
      expect(users1).not.toBeNull();
    });
  });
});

Mocking Redis for Tests

Create a flexible Redis mock for integration tests:

// __tests__/helpers/redisMock.js
// In-memory stand-in for a node-redis v4 client, suitable for integration
// tests: supports get/set/setEx with expiration, del, glob-style keys,
// flushAll, and no-op pub/sub and connection methods.
class RedisMockClient {
  constructor() {
    this.store = new Map();
    this.isReady = true;
  }

  /**
   * Return the stored value, or null when the key is absent or has
   * lazily expired (expiration is checked on read, like Redis).
   */
  async get(key) {
    const item = this.store.get(key);
    if (!item) return null;

    // Check expiration
    if (item.expiresAt && Date.now() > item.expiresAt) {
      this.store.delete(key);
      return null;
    }

    return item.value;
  }

  /** Store a value with no expiration. */
  async set(key, value) {
    this.store.set(key, { value, expiresAt: null });
    return 'OK';
  }

  /** Store a value that expires after `seconds`. */
  async setEx(key, seconds, value) {
    this.store.set(key, {
      value,
      expiresAt: Date.now() + (seconds * 1000)
    });
    return 'OK';
  }

  /** Delete the given keys; returns how many actually existed. */
  async del(...keys) {
    let count = 0;
    for (const key of keys) {
      if (this.store.delete(key)) count++;
    }
    return count;
  }

  /**
   * Return all keys matching a Redis glob pattern.
   *
   * The pattern is translated into an ANCHORED regex with metacharacters
   * escaped. The previous `pattern.replace('*', '.*')` only rewrote the
   * FIRST '*', escaped nothing, and was unanchored — so 'a*' also matched
   * 'data'. Redis-style '*' and '?' wildcards are supported.
   */
  async keys(pattern) {
    const escaped = pattern.replace(/[.+^${}()|[\]\\]/g, '\\$&');
    const regex = new RegExp(`^${escaped.replace(/\*/g, '.*').replace(/\?/g, '.')}$`);
    return Array.from(this.store.keys()).filter((key) => regex.test(key));
  }

  /** Remove every key. */
  async flushAll() {
    this.store.clear();
    return 'OK';
  }

  // Pub/Sub methods (no-ops; publish reports one subscriber)
  async publish(channel, message) {
    return 1; // Number of subscribers
  }

  async subscribe(channel) {
    return 'OK';
  }

  // Simulate connection lifecycle
  async connect() {
    this.isReady = true;
  }

  async quit() {
    this.isReady = false;
  }

  /** Mirror of node-redis duplicate(): a fresh, independent client. */
  duplicate() {
    return new RedisMockClient();
  }
}

module.exports = RedisMockClient;

Integration Testing

Test caching with actual API endpoints:

// __tests__/integration/productAPI.test.js
// __tests__/integration/productAPI.test.js
//
// End-to-end caching behavior: verifies the X-Cache response header,
// hit/miss metric counters, and cache invalidation on writes.
const request = require('supertest');
const app = require('../../src/app');
const Product = require('../../src/models/product');
const cacheService = require('../../src/services/cacheService');

describe('Product API Caching', () => {
  // Shorthand for fetching a single product by id.
  const getProduct = (id) => request(app).get(`/api/products/${id}`);

  beforeEach(async () => {
    // Every test starts with an empty database, an empty cache,
    // and zeroed hit/miss counters.
    await Product.deleteMany({});
    await cacheService.delPattern('*');
    cacheService.resetMetrics();
  });

  describe('GET /api/products/:id', () => {
    test('should cache product on first request', async () => {
      const product = await Product.create({
        name: 'Test Product',
        price: 99.99,
        category: 'Electronics'
      });

      // First request populates the cache.
      const missResponse = await getProduct(product._id).expect(200);
      expect(missResponse.headers['x-cache']).toBe('MISS');
      expect(missResponse.body.data.name).toBe('Test Product');

      // Second request is served from the cache.
      const hitResponse = await getProduct(product._id).expect(200);
      expect(hitResponse.headers['x-cache']).toBe('HIT');
      expect(hitResponse.body.data.name).toBe('Test Product');

      const metrics = cacheService.getMetrics();
      expect(metrics.hits).toBe(1);
      expect(metrics.misses).toBe(1);
    });

    test('should invalidate cache on update', async () => {
      const product = await Product.create({
        name: 'Original Name',
        price: 50,
        category: 'Books'
      });

      // Warm the cache with the original document.
      await getProduct(product._id).expect(200);

      // Writing through the API must evict the cached entry.
      await request(app)
        .put(`/api/products/${product._id}`)
        .send({ name: 'Updated Name' })
        .expect(200);

      // The follow-up read misses the cache and sees fresh data.
      const refreshed = await getProduct(product._id).expect(200);
      expect(refreshed.headers['x-cache']).toBe('MISS');
      expect(refreshed.body.data.name).toBe('Updated Name');
    });
  });

  describe('GET /api/products', () => {
    test('should cache product list', async () => {
      await Product.create([
        { name: 'Product 1', price: 10, category: 'A' },
        { name: 'Product 2', price: 20, category: 'A' }
      ]);

      const firstList = await request(app).get('/api/products').expect(200);
      expect(firstList.headers['x-cache']).toBe('MISS');
      expect(firstList.body.count).toBe(2);

      const secondList = await request(app).get('/api/products').expect(200);
      expect(secondList.headers['x-cache']).toBe('HIT');
    });
  });
});

Cache Hit Ratio Monitoring

Implement comprehensive cache monitoring and analytics:

// src/services/cacheMonitor.js
const redisClient = require('../config/redis');

// Aggregates cache hit/miss counters and response-time histograms in Redis
// so metrics survive process restarts and are shared across instances.
class CacheMonitor {
  constructor() {
    this.metricsKey = 'cache:metrics';
    this.startTime = Date.now();
  }

  /**
   * Record cache hit
   * @param {string} key - cache key (currently unused; kept for API symmetry)
   * @param {number} responseTime - elapsed time in ms
   */
  async recordHit(key, responseTime) {
    await redisClient.hIncrBy(this.metricsKey, 'hits', 1);
    await this.recordResponseTime(responseTime);
  }

  /**
   * Record cache miss
   * @param {string} key - cache key (currently unused; kept for API symmetry)
   * @param {number} responseTime - elapsed time in ms
   */
  async recordMiss(key, responseTime) {
    await redisClient.hIncrBy(this.metricsKey, 'misses', 1);
    await this.recordResponseTime(responseTime);
  }

  /**
   * Record response time in a fixed-width histogram (10ms buckets).
   */
  async recordResponseTime(ms) {
    const bucket = Math.floor(ms / 10) * 10; // 10ms buckets
    await redisClient.hIncrBy('cache:response_times', bucket, 1);
  }

  /**
   * Get comprehensive metrics: hit rate, counts, uptime, latency
   * percentiles and a subset of Redis server stats.
   */
  async getMetrics() {
    const [metrics, responseTimes, info] = await Promise.all([
      redisClient.hGetAll(this.metricsKey),
      redisClient.hGetAll('cache:response_times'),
      redisClient.info('stats')
    ]);

    const hits = parseInt(metrics.hits || '0', 10);
    const misses = parseInt(metrics.misses || '0', 10);
    const total = hits + misses;

    // Histogram buckets sorted ascending for the percentile sweep.
    const times = Object.entries(responseTimes)
      .map(([bucket, count]) => ({
        time: parseInt(bucket, 10),
        count: parseInt(count, 10)
      }))
      .sort((a, b) => a.time - b.time);

    return {
      // Always a string ("0.00" when no traffic) so consumers get one type.
      hitRate: total > 0 ? ((hits / total) * 100).toFixed(2) : '0.00',
      hits,
      misses,
      total,
      uptime: Math.floor((Date.now() - this.startTime) / 1000),
      responseTimes: this.calculatePercentiles(times),
      redis: this.parseRedisInfo(info)
    };
  }

  /**
   * Sweep the sorted histogram and report the bucket at which the
   * cumulative count crosses each percentile.
   * @param {{time: number, count: number}[]} times - sorted buckets
   * @returns {{p50?: number, p95?: number, p99?: number}}
   */
  calculatePercentiles(times) {
    const totalCount = times.reduce((sum, t) => sum + t.count, 0);
    if (totalCount === 0) return {};

    // null means "not yet determined". A falsy check (the previous
    // `!percentiles.p50`) would treat a legitimate 0ms bucket as unset and
    // report the NEXT bucket instead.
    const percentiles = { p50: null, p95: null, p99: null };
    let cumulative = 0;

    for (const { time, count } of times) {
      cumulative += count;
      const percent = (cumulative / totalCount) * 100;

      if (percent >= 50 && percentiles.p50 === null) percentiles.p50 = time;
      if (percent >= 95 && percentiles.p95 === null) percentiles.p95 = time;
      if (percent >= 99 && percentiles.p99 === null) percentiles.p99 = time;
    }

    return percentiles;
  }

  /**
   * Parse the "key:value" lines of a Redis INFO section into an object and
   * pick out the stats we expose. Splits on the FIRST ':' only, so values
   * containing ':' are not truncated; '#' section headers are skipped.
   */
  parseRedisInfo(info) {
    const stats = {};

    for (const line of info.split('\r\n')) {
      if (line.startsWith('#')) continue;
      const sep = line.indexOf(':');
      if (sep === -1) continue;
      stats[line.slice(0, sep)] = line.slice(sep + 1);
    }

    return {
      totalConnectionsReceived: stats.total_connections_received,
      totalCommandsProcessed: stats.total_commands_processed,
      keysExpired: stats.expired_keys,
      keysEvicted: stats.evicted_keys
    };
  }

  /**
   * Reset all metrics
   */
  async reset() {
    await redisClient.del(this.metricsKey, 'cache:response_times');
    this.startTime = Date.now();
  }
}

module.exports = new CacheMonitor();

Debugging Cache Issues

Common cache problems and how to debug them:

// Debug utility for cache inspection: per-key details, oversized entries,
// keys without TTLs, and key-prefix distribution. Takes any client exposing
// get/ttl/type/keys, so it works against a real or mocked Redis.
class CacheDebugger {
  constructor(redis) {
    this.redis = redis;
  }

  /**
   * Inspect cache key details.
   * @param {string} key
   * @returns {Promise<object>} existence, type, TTL, byte size and value
   */
  async inspectKey(key) {
    const [value, ttl, type] = await Promise.all([
      this.redis.get(key),
      this.redis.ttl(key),
      this.redis.type(key)
    ]);

    return {
      key,
      exists: value !== null,
      type,
      ttl: ttl > 0 ? ttl : 'no expiration',
      size: value ? Buffer.byteLength(value) : 0,
      value: value ? this.#tryParseJson(value) : null
    };
  }

  // Parse JSON when possible, otherwise return the raw string. Previously an
  // unguarded JSON.parse made inspectKey throw on any non-JSON value.
  #tryParseJson(raw) {
    try {
      return JSON.parse(raw);
    } catch {
      return raw;
    }
  }

  /**
   * Find cache entries at or above `minSize` bytes, largest first.
   * NOTE: KEYS is O(N) and blocks Redis — fine for debugging, prefer SCAN
   * in production.
   * @param {number} [minSize=1024] - threshold in bytes
   */
  async findLargeKeys(minSize = 1024) {
    const keys = await this.redis.keys('*');

    // Fetch all values in parallel instead of one round trip per key.
    const values = await Promise.all(keys.map((key) => this.redis.get(key)));

    const large = [];
    keys.forEach((key, i) => {
      const value = values[i];
      if (value) {
        const size = Buffer.byteLength(value);
        if (size >= minSize) {
          large.push({ key, size });
        }
      }
    });

    return large.sort((a, b) => b.size - a.size);
  }

  /**
   * Find keys without expiration (candidates for unbounded memory growth).
   */
  async findKeysWithoutExpiration() {
    const keys = await this.redis.keys('*');

    // TTL lookups issued in parallel; Redis returns -1 for "no expiration".
    const ttls = await Promise.all(keys.map((key) => this.redis.ttl(key)));

    return keys.filter((key, i) => ttls[i] === -1);
  }

  /**
   * Analyze cache key patterns: count keys per prefix (the segment before
   * the first ':'), most common first.
   */
  async analyzeKeyPatterns() {
    const keys = await this.redis.keys('*');
    const patterns = {};

    for (const key of keys) {
      const prefix = key.split(':')[0];
      patterns[prefix] = (patterns[prefix] || 0) + 1;
    }

    return Object.entries(patterns)
      .map(([prefix, count]) => ({ prefix, count }))
      .sort((a, b) => b.count - a.count);
  }
}

module.exports = CacheDebugger;
Common Cache Issues:
  • Stale Data: Check TTL values and invalidation logic
  • Low Hit Rate: Review cache key strategy and TTL values
  • Memory Growth: Look for keys without expiration or large values
  • Cache Stampede: Implement lock mechanisms or stale-while-revalidate
  • Inconsistent Data: Verify cache invalidation is working across all update paths

Load Testing Cache Performance

Test your cache under realistic load:

// Install artillery for load testing
// npm install -g artillery

# load-test.yml
# Artillery scenario: warm up, sustain, then peak. Reads are weighted far
# heavier than writes so the cache hit path dominates, with a 10% write mix
# to exercise invalidation.
config:
  target: 'http://localhost:3000'
  phases:
    - duration: 60
      arrivalRate: 10
      name: 'Warm up'
    - duration: 300
      arrivalRate: 50
      name: 'Sustained load'
    - duration: 120
      arrivalRate: 100
      name: 'Peak load'
  plugins:
    expect: {}

scenarios:
  - name: 'Get product (should be cached)'
    weight: 70
    flow:
      - get:
          url: '/api/products/{{ $randomNumber(1, 100) }}'
          expect:
            - statusCode: 200
            - hasHeader: 'X-Cache'

  - name: 'List products (should be cached)'
    weight: 20
    flow:
      - get:
          url: '/api/products?category={{ $randomString() }}'
          expect:
            - statusCode: 200

  - name: 'Update product (invalidates cache)'
    weight: 10
    flow:
      - put:
          url: '/api/products/{{ $randomNumber(1, 100) }}'
          json:
            # Template expressions must be quoted to be valid YAML scalars
            price: '{{ $randomNumber(10, 1000) }}'
          expect:
            - statusCode: 200
// Run load test and collect metrics
// artillery run load-test.yml --output report.json
// artillery report report.json

// Custom load test script
const autocannon = require('autocannon');

/**
 * Fire a 60-second load test at a single cached product endpoint
 * (100 connections, pipelining depth 10) and print summary statistics
 * once the run finishes.
 */
async function runLoadTest() {
  const loadTest = autocannon({
    url: 'http://localhost:3000',
    connections: 100,
    duration: 60,
    pipelining: 10,
    requests: [{ method: 'GET', path: '/api/products/123' }]
  });

  // Render a live progress bar while the test runs.
  autocannon.track(loadTest, { renderProgressBar: true });

  loadTest.on('done', (stats) => {
    console.log('Load Test Results:');
    console.log(`Requests: ${stats.requests.total}`);
    console.log(`Throughput: ${stats.throughput.mean} req/sec`);
    console.log(`Latency p50: ${stats.latency.p50}ms`);
    console.log(`Latency p99: ${stats.latency.p99}ms`);
    console.log(`Errors: ${stats.errors}`);
  });
}

runLoadTest();
Performance Benchmarks: A well-optimized Redis cache should achieve: 95%+ hit rate for frequently accessed data, sub-1ms response time for cache hits, 10,000+ requests/second on modest hardware, and minimal memory growth over time.
Exercise: Create a comprehensive cache testing suite that includes:
  • Unit tests for all cache service methods with edge cases
  • Integration tests verifying cache invalidation across all API endpoints
  • Performance tests measuring cache hit rates under load
  • Debug script that identifies cache issues (stale data, large keys, missing expirations)
  • Monitoring dashboard showing real-time cache metrics and alerts
Testing Best Practices: Always use mock Redis in unit tests, test cache invalidation thoroughly, monitor cache hit rates in production, implement alerts for low hit rates or high error rates, and regularly review cache key patterns and TTL values.