Redis & Advanced Caching
Building a Caching Layer Project (Part 1)
Building a Caching Layer Project (Part 1)
Let's build a production-ready caching layer for a RESTful API using Express.js and Redis. This project will demonstrate real-world caching patterns and best practices.
Project Architecture
We'll build a product catalog API with multiple caching strategies:
// Project Structure
product-api/
├── src/
│ ├── config/
│ │ └── redis.js // Redis configuration
│ ├── middleware/
│ │ ├── cache.js // Cache middleware
│ │ └── rateLimiter.js // Rate limiting
│ ├── services/
│ │ ├── cacheService.js // Cache abstraction layer
│ │ └── productService.js // Business logic
│ ├── controllers/
│ │ └── productController.js
│ ├── routes/
│ │ └── products.js
│ └── app.js
├── package.json
└── .env
product-api/
├── src/
│ ├── config/
│ │ └── redis.js // Redis configuration
│ ├── middleware/
│ │ ├── cache.js // Cache middleware
│ │ └── rateLimiter.js // Rate limiting
│ ├── services/
│ │ ├── cacheService.js // Cache abstraction layer
│ │ └── productService.js // Business logic
│ ├── controllers/
│ │ └── productController.js
│ ├── routes/
│ │ └── products.js
│ └── app.js
├── package.json
└── .env
Project Features:
- Cache-aside pattern for product data
- Cache invalidation on updates
- Automatic cache warming
- Cache hit/miss metrics
- Rate limiting with Redis
- Graceful Redis connection handling
Setting Up the API with Express
First, let's set up the basic Express application:
// package.json
{
"name": "product-api",
"version": "1.0.0",
"scripts": {
"start": "node src/app.js",
"dev": "nodemon src/app.js"
},
"dependencies": {
"express": "^4.18.2",
"redis": "^4.6.0",
"dotenv": "^16.0.3",
"mongoose": "^7.0.0"
},
"devDependencies": {
"nodemon": "^2.0.20"
}
}
{
"name": "product-api",
"version": "1.0.0",
"scripts": {
"start": "node src/app.js",
"dev": "nodemon src/app.js"
},
"dependencies": {
"express": "^4.18.2",
"redis": "^4.6.0",
"dotenv": "^16.0.3",
"mongoose": "^7.0.0"
},
"devDependencies": {
"nodemon": "^2.0.20"
}
}
// .env
PORT=3000
MONGODB_URI=mongodb://localhost:27017/productdb
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=
CACHE_TTL=3600
NODE_ENV=development
PORT=3000
MONGODB_URI=mongodb://localhost:27017/productdb
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=
CACHE_TTL=3600
NODE_ENV=development
// src/app.js
require('dotenv').config();
const express = require('express');
const mongoose = require('mongoose');
const redisClient = require('./config/redis');
const productRoutes = require('./routes/products');
const app = express();
const PORT = process.env.PORT || 3000;
// Middleware
app.use(express.json());
// Health check endpoint: process uptime plus MongoDB/Redis connectivity.
app.get('/health', (req, res) => {
  res.json({
    uptime: process.uptime(),
    timestamp: Date.now(),
    mongodb: mongoose.connection.readyState === 1 ? 'connected' : 'disconnected',
    redis: redisClient.isReady ? 'connected' : 'disconnected'
  });
});
// Routes
app.use('/api/products', productRoutes);
// Error handling (registered last: catches errors forwarded via next(err))
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).json({ error: 'Internal server error' });
});
// Connect to MongoDB
mongoose.connect(process.env.MONGODB_URI)
  .then(() => console.log('MongoDB connected'))
  .catch(err => console.error('MongoDB connection error:', err));
// Start server — keep the handle so shutdown can stop accepting connections.
const server = app.listen(PORT, () => {
  console.log(`Server running on port ${PORT}`);
});
// Graceful shutdown: stop the HTTP listener first so in-flight requests can
// drain, then close the Redis and MongoDB connections.
process.on('SIGTERM', () => {
  console.log('SIGTERM received, shutting down gracefully...');
  server.close(async () => {
    await redisClient.quit();
    await mongoose.connection.close();
    process.exit(0);
  });
});
require('dotenv').config();
const express = require('express');
const mongoose = require('mongoose');
const redisClient = require('./config/redis');
const productRoutes = require('./routes/products');
const app = express();
const PORT = process.env.PORT || 3000;
// Middleware
app.use(express.json());
// Health check endpoint: process uptime plus MongoDB/Redis connectivity.
app.get('/health', (req, res) => {
  res.json({
    uptime: process.uptime(),
    timestamp: Date.now(),
    mongodb: mongoose.connection.readyState === 1 ? 'connected' : 'disconnected',
    redis: redisClient.isReady ? 'connected' : 'disconnected'
  });
});
// Routes
app.use('/api/products', productRoutes);
// Error handling (registered last: catches errors forwarded via next(err))
app.use((err, req, res, next) => {
  console.error(err.stack);
  res.status(500).json({ error: 'Internal server error' });
});
// Connect to MongoDB
mongoose.connect(process.env.MONGODB_URI)
  .then(() => console.log('MongoDB connected'))
  .catch(err => console.error('MongoDB connection error:', err));
// Start server — keep the handle so shutdown can stop accepting connections.
const server = app.listen(PORT, () => {
  console.log(`Server running on port ${PORT}`);
});
// Graceful shutdown: stop the HTTP listener first so in-flight requests can
// drain, then close the Redis and MongoDB connections.
process.on('SIGTERM', () => {
  console.log('SIGTERM received, shutting down gracefully...');
  server.close(async () => {
    await redisClient.quit();
    await mongoose.connection.close();
    process.exit(0);
  });
});
Redis Connection Setup
Create a robust Redis client with error handling and reconnection logic:
// src/config/redis.js
const redis = require('redis');
// Redis client with capped-backoff reconnection and lifecycle logging.
const redisClient = redis.createClient({
  socket: {
    host: process.env.REDIS_HOST || 'localhost',
    // Env vars are strings; the socket option expects a numeric port.
    port: Number(process.env.REDIS_PORT) || 6379,
    reconnectStrategy: (retries) => {
      if (retries > 10) {
        console.error('Redis reconnection limit reached');
        // Returning an Error tells node-redis to stop retrying.
        return new Error('Redis reconnection failed');
      }
      // Linear backoff (100ms per attempt), capped at 3 seconds.
      return Math.min(retries * 100, 3000);
    }
  },
  // Empty REDIS_PASSWORD falls through to undefined (no AUTH).
  password: process.env.REDIS_PASSWORD || undefined,
  database: 0
});
// Connection lifecycle logging
redisClient.on('connect', () => console.log('Redis client connecting...'));
redisClient.on('ready', () => console.log('Redis client ready'));
redisClient.on('error', (err) => console.error('Redis error:', err));
redisClient.on('reconnecting', () => console.log('Redis client reconnecting...'));
// Connect eagerly; failures are logged and reconnectStrategy takes over.
redisClient.connect().catch(err => {
  console.error('Failed to connect to Redis:', err);
});
module.exports = redisClient;
const redis = require('redis');
// Redis client with capped-backoff reconnection and lifecycle logging.
const redisClient = redis.createClient({
  socket: {
    host: process.env.REDIS_HOST || 'localhost',
    // Env vars are strings; the socket option expects a numeric port.
    port: Number(process.env.REDIS_PORT) || 6379,
    reconnectStrategy: (retries) => {
      if (retries > 10) {
        console.error('Redis reconnection limit reached');
        // Returning an Error tells node-redis to stop retrying.
        return new Error('Redis reconnection failed');
      }
      // Linear backoff (100ms per attempt), capped at 3 seconds.
      return Math.min(retries * 100, 3000);
    }
  },
  // Empty REDIS_PASSWORD falls through to undefined (no AUTH).
  password: process.env.REDIS_PASSWORD || undefined,
  database: 0
});
// Connection lifecycle logging
redisClient.on('connect', () => console.log('Redis client connecting...'));
redisClient.on('ready', () => console.log('Redis client ready'));
redisClient.on('error', (err) => console.error('Redis error:', err));
redisClient.on('reconnecting', () => console.log('Redis client reconnecting...'));
// Connect eagerly; failures are logged and reconnectStrategy takes over.
redisClient.connect().catch(err => {
  console.error('Failed to connect to Redis:', err);
});
module.exports = redisClient;
Implementing Cache-Aside Pattern
Create a reusable caching service with the cache-aside pattern:
// src/services/cacheService.js
const redisClient = require('../config/redis');
/**
 * Cache-aside abstraction over Redis with hit/miss/error metrics.
 * Exported as a singleton so all callers share one metrics counter.
 */
class CacheService {
  constructor() {
    // TTL in seconds; explicit radix guards malformed env values.
    this.defaultTTL = parseInt(process.env.CACHE_TTL, 10) || 3600;
    this.metrics = {
      hits: 0,
      misses: 0,
      errors: 0
    };
  }
  /**
   * Get data from cache, or fetch from source on a miss (cache-aside).
   * Redis failures are counted and fall through to fetchFn; errors thrown
   * by fetchFn itself propagate to the caller untouched (and fetchFn is
   * only ever called once per invocation).
   * @param {string} key - Cache key
   * @param {Function} fetchFn - Async function to fetch data on cache miss
   * @param {number} ttl - Time to live in seconds
   */
  async get(key, fetchFn, ttl = this.defaultTTL) {
    let cached = null;
    try {
      cached = await redisClient.get(key);
    } catch (err) {
      this.metrics.errors++;
      console.error('Cache get error:', err);
    }
    if (cached !== null) {
      try {
        const value = JSON.parse(cached);
        this.metrics.hits++;
        return value;
      } catch (err) {
        // Corrupt entry: count it and fall through to a refetch.
        this.metrics.errors++;
        console.error('Cache parse error:', err);
      }
    }
    // Cache miss - fetch from source. fetchFn errors are NOT swallowed.
    this.metrics.misses++;
    const data = await fetchFn();
    // Only cache real values; storing null/undefined would poison the key
    // (e.g. the cache middleware passes a fetchFn that returns null).
    if (data !== null && data !== undefined) {
      // Store in cache (fire and forget)
      this.set(key, data, ttl).catch(err => {
        console.error('Cache set error:', err);
      });
    }
    return data;
  }
  /**
   * Set data in cache (JSON-serialized, with TTL).
   * @returns {boolean} true on success, false on serialization/Redis error
   */
  async set(key, value, ttl = this.defaultTTL) {
    try {
      const serialized = JSON.stringify(value);
      await redisClient.setEx(key, ttl, serialized);
      return true;
    } catch (err) {
      console.error('Cache set error:', err);
      return false;
    }
  }
  /**
   * Delete a single key from cache.
   * @returns {boolean} true on success, false on Redis error
   */
  async del(key) {
    try {
      await redisClient.del(key);
      return true;
    } catch (err) {
      console.error('Cache delete error:', err);
      return false;
    }
  }
  /**
   * Delete all keys matching a glob pattern.
   * Uses SCAN (non-blocking, incremental) rather than KEYS, which blocks
   * the Redis event loop on large keyspaces.
   * @returns {number} count of keys deleted (0 on error)
   */
  async delPattern(pattern) {
    try {
      const keys = [];
      for await (const key of redisClient.scanIterator({ MATCH: pattern, COUNT: 100 })) {
        keys.push(key);
      }
      if (keys.length > 0) {
        await redisClient.del(keys);
      }
      return keys.length;
    } catch (err) {
      console.error('Cache delete pattern error:', err);
      return 0;
    }
  }
  /**
   * Get a snapshot of cache metrics.
   * hitRate is a number (percentage, 2 decimals), 0 before any requests.
   */
  getMetrics() {
    const total = this.metrics.hits + this.metrics.misses;
    return {
      hits: this.metrics.hits,
      misses: this.metrics.misses,
      errors: this.metrics.errors,
      hitRate: total > 0 ? Number((this.metrics.hits / total * 100).toFixed(2)) : 0
    };
  }
  /**
   * Reset all metrics counters to zero.
   */
  resetMetrics() {
    this.metrics = { hits: 0, misses: 0, errors: 0 };
  }
}
module.exports = new CacheService();
const redisClient = require('../config/redis');
/**
 * Cache-aside abstraction over Redis with hit/miss/error metrics.
 * Exported as a singleton so all callers share one metrics counter.
 */
class CacheService {
  constructor() {
    // TTL in seconds; explicit radix guards malformed env values.
    this.defaultTTL = parseInt(process.env.CACHE_TTL, 10) || 3600;
    this.metrics = {
      hits: 0,
      misses: 0,
      errors: 0
    };
  }
  /**
   * Get data from cache, or fetch from source on a miss (cache-aside).
   * Redis failures are counted and fall through to fetchFn; errors thrown
   * by fetchFn itself propagate to the caller untouched (and fetchFn is
   * only ever called once per invocation).
   * @param {string} key - Cache key
   * @param {Function} fetchFn - Async function to fetch data on cache miss
   * @param {number} ttl - Time to live in seconds
   */
  async get(key, fetchFn, ttl = this.defaultTTL) {
    let cached = null;
    try {
      cached = await redisClient.get(key);
    } catch (err) {
      this.metrics.errors++;
      console.error('Cache get error:', err);
    }
    if (cached !== null) {
      try {
        const value = JSON.parse(cached);
        this.metrics.hits++;
        return value;
      } catch (err) {
        // Corrupt entry: count it and fall through to a refetch.
        this.metrics.errors++;
        console.error('Cache parse error:', err);
      }
    }
    // Cache miss - fetch from source. fetchFn errors are NOT swallowed.
    this.metrics.misses++;
    const data = await fetchFn();
    // Only cache real values; storing null/undefined would poison the key
    // (e.g. the cache middleware passes a fetchFn that returns null).
    if (data !== null && data !== undefined) {
      // Store in cache (fire and forget)
      this.set(key, data, ttl).catch(err => {
        console.error('Cache set error:', err);
      });
    }
    return data;
  }
  /**
   * Set data in cache (JSON-serialized, with TTL).
   * @returns {boolean} true on success, false on serialization/Redis error
   */
  async set(key, value, ttl = this.defaultTTL) {
    try {
      const serialized = JSON.stringify(value);
      await redisClient.setEx(key, ttl, serialized);
      return true;
    } catch (err) {
      console.error('Cache set error:', err);
      return false;
    }
  }
  /**
   * Delete a single key from cache.
   * @returns {boolean} true on success, false on Redis error
   */
  async del(key) {
    try {
      await redisClient.del(key);
      return true;
    } catch (err) {
      console.error('Cache delete error:', err);
      return false;
    }
  }
  /**
   * Delete all keys matching a glob pattern.
   * Uses SCAN (non-blocking, incremental) rather than KEYS, which blocks
   * the Redis event loop on large keyspaces.
   * @returns {number} count of keys deleted (0 on error)
   */
  async delPattern(pattern) {
    try {
      const keys = [];
      for await (const key of redisClient.scanIterator({ MATCH: pattern, COUNT: 100 })) {
        keys.push(key);
      }
      if (keys.length > 0) {
        await redisClient.del(keys);
      }
      return keys.length;
    } catch (err) {
      console.error('Cache delete pattern error:', err);
      return 0;
    }
  }
  /**
   * Get a snapshot of cache metrics.
   * hitRate is a number (percentage, 2 decimals), 0 before any requests.
   */
  getMetrics() {
    const total = this.metrics.hits + this.metrics.misses;
    return {
      hits: this.metrics.hits,
      misses: this.metrics.misses,
      errors: this.metrics.errors,
      hitRate: total > 0 ? Number((this.metrics.hits / total * 100).toFixed(2)) : 0
    };
  }
  /**
   * Reset all metrics counters to zero.
   */
  resetMetrics() {
    this.metrics = { hits: 0, misses: 0, errors: 0 };
  }
}
module.exports = new CacheService();
Cache Middleware
Create Express middleware for automatic response caching:
// src/middleware/cache.js
const cacheService = require('../services/cacheService');
/**
* Cache middleware for Express routes
* @param {number} ttl - Time to live in seconds
* @param {Function} keyFn - Optional function to generate cache key
*/
const cacheMiddleware = (ttl = 3600, keyFn = null) => {
return async (req, res, next) => {
// Only cache GET requests
if (req.method !== 'GET') {
return next();
}
// Generate cache key
const cacheKey = keyFn
? keyFn(req)
: `cache:${req.originalUrl || req.url}`;
try {
// Try to get from cache
const cached = await cacheService.get(
cacheKey,
() => null, // Don't fetch on miss, let route handler do it
ttl
);
if (cached) {
// Set cache header
res.set('X-Cache', 'HIT');
return res.json(cached);
}
// Cache miss - intercept response
res.set('X-Cache', 'MISS');
const originalJson = res.json.bind(res);
res.json = (data) => {
// Cache successful responses only
if (res.statusCode === 200 && data) {
cacheService.set(cacheKey, data, ttl).catch(err => {
console.error('Failed to cache response:', err);
});
}
return originalJson(data);
};
next();
} catch (err) {
console.error('Cache middleware error:', err);
// Continue without caching on error
next();
}
};
};
/**
* Middleware to invalidate cache by pattern
*/
const invalidateCacheMiddleware = (patternFn) => {
return async (req, res, next) => {
// Store original res.json to call after invalidation
const originalJson = res.json.bind(res);
res.json = async (data) => {
// Only invalidate on successful operations
if (res.statusCode >= 200 && res.statusCode < 300) {
const pattern = patternFn(req);
await cacheService.delPattern(pattern);
}
return originalJson(data);
};
next();
};
};
module.exports = {
cacheMiddleware,
invalidateCacheMiddleware
};
const cacheService = require('../services/cacheService');
/**
* Cache middleware for Express routes
* @param {number} ttl - Time to live in seconds
* @param {Function} keyFn - Optional function to generate cache key
*/
const cacheMiddleware = (ttl = 3600, keyFn = null) => {
return async (req, res, next) => {
// Only cache GET requests
if (req.method !== 'GET') {
return next();
}
// Generate cache key
const cacheKey = keyFn
? keyFn(req)
: `cache:${req.originalUrl || req.url}`;
try {
// Try to get from cache
const cached = await cacheService.get(
cacheKey,
() => null, // Don't fetch on miss, let route handler do it
ttl
);
if (cached) {
// Set cache header
res.set('X-Cache', 'HIT');
return res.json(cached);
}
// Cache miss - intercept response
res.set('X-Cache', 'MISS');
const originalJson = res.json.bind(res);
res.json = (data) => {
// Cache successful responses only
if (res.statusCode === 200 && data) {
cacheService.set(cacheKey, data, ttl).catch(err => {
console.error('Failed to cache response:', err);
});
}
return originalJson(data);
};
next();
} catch (err) {
console.error('Cache middleware error:', err);
// Continue without caching on error
next();
}
};
};
/**
* Middleware to invalidate cache by pattern
*/
const invalidateCacheMiddleware = (patternFn) => {
return async (req, res, next) => {
// Store original res.json to call after invalidation
const originalJson = res.json.bind(res);
res.json = async (data) => {
// Only invalidate on successful operations
if (res.statusCode >= 200 && res.statusCode < 300) {
const pattern = patternFn(req);
await cacheService.delPattern(pattern);
}
return originalJson(data);
};
next();
};
};
module.exports = {
cacheMiddleware,
invalidateCacheMiddleware
};
Cache Key Strategy: Use descriptive, hierarchical keys like `cache:products:list`, `cache:products:123`, `cache:categories:5:products` to enable pattern-based invalidation and easy debugging.
Product Model and Service
Create the product data model and service layer:
// src/models/product.js (Mongoose model)
const mongoose = require('mongoose');
// Product catalog document; `timestamps: true` adds createdAt/updatedAt.
const productSchema = new mongoose.Schema({
  name: { type: String, required: true, index: true },
  description: String,
  price: { type: Number, required: true, min: 0 },
  // No single-field index here: the { category: 1, price: 1 } compound index
  // below already serves category-only queries via its prefix.
  category: { type: String, required: true },
  stock: { type: Number, default: 0 },
  imageUrl: String,
  tags: [String],
  isActive: { type: Boolean, default: true }
}, {
  timestamps: true
});
// Indexes for common queries
productSchema.index({ category: 1, price: 1 });
productSchema.index({ name: 'text', description: 'text' });
module.exports = mongoose.model('Product', productSchema);
const mongoose = require('mongoose');
// Product catalog document; `timestamps: true` adds createdAt/updatedAt.
const productSchema = new mongoose.Schema({
  name: { type: String, required: true, index: true },
  description: String,
  price: { type: Number, required: true, min: 0 },
  // No single-field index here: the { category: 1, price: 1 } compound index
  // below already serves category-only queries via its prefix.
  category: { type: String, required: true },
  stock: { type: Number, default: 0 },
  imageUrl: String,
  tags: [String],
  isActive: { type: Boolean, default: true }
}, {
  timestamps: true
});
// Indexes for common queries
productSchema.index({ category: 1, price: 1 });
productSchema.index({ name: 'text', description: 'text' });
module.exports = mongoose.model('Product', productSchema);
// src/services/productService.js
const Product = require('../models/product');
const cacheService = require('./cacheService');
/**
 * Business logic for products: cache-aside reads, invalidation on writes.
 */
class ProductService {
  /**
   * Get all active products with caching (5-minute TTL, max 100 results).
   * @param {Object} filters - optional { category, minPrice, maxPrice }
   */
  async getAll(filters = {}) {
    // Sort keys so { a, b } and { b, a } hash to the same cache entry —
    // JSON.stringify is property-order-sensitive.
    const normalized = {};
    for (const k of Object.keys(filters).sort()) normalized[k] = filters[k];
    const cacheKey = `products:list:${JSON.stringify(normalized)}`;
    return await cacheService.get(
      cacheKey,
      async () => {
        const query = { isActive: true };
        if (filters.category) query.category = filters.category;
        if (filters.minPrice) query.price = { $gte: filters.minPrice };
        if (filters.maxPrice) query.price = { ...query.price, $lte: filters.maxPrice };
        return await Product.find(query)
          .sort({ createdAt: -1 })
          .limit(100)
          .lean();
      },
      300 // 5 minutes TTL for list
    );
  }
  /**
   * Get a product by ID with caching.
   * @throws {Error} 'Product not found' when the ID does not exist
   */
  async getById(id) {
    const cacheKey = `products:${id}`;
    return await cacheService.get(
      cacheKey,
      async () => {
        const product = await Product.findById(id).lean();
        if (!product) {
          throw new Error('Product not found');
        }
        return product;
      },
      3600 // 1 hour TTL for individual products
    );
  }
  /**
   * Create a new product; any cached list may now be stale.
   */
  async create(productData) {
    const product = await Product.create(productData);
    // Invalidate list cache
    await cacheService.delPattern('products:list:*');
    return product;
  }
  /**
   * Update a product and invalidate its caches.
   * NOTE(review): consider { runValidators: true } — updates currently
   * bypass schema validation; confirm before changing behavior.
   * @throws {Error} 'Product not found' when the ID does not exist
   */
  async update(id, updates) {
    const product = await Product.findByIdAndUpdate(id, updates, { new: true });
    if (!product) {
      throw new Error('Product not found');
    }
    // Item entry and every list are now stale; drop them in parallel.
    await Promise.all([
      cacheService.del(`products:${id}`),
      cacheService.delPattern('products:list:*')
    ]);
    return product;
  }
  /**
   * Delete a product and invalidate its caches.
   * @throws {Error} 'Product not found' when the ID does not exist
   */
  async delete(id) {
    const product = await Product.findByIdAndDelete(id);
    if (!product) {
      throw new Error('Product not found');
    }
    // Independent invalidations run in parallel.
    await Promise.all([
      cacheService.del(`products:${id}`),
      cacheService.delPattern('products:list:*')
    ]);
    return product;
  }
}
module.exports = new ProductService();
const Product = require('../models/product');
const cacheService = require('./cacheService');
/**
 * Business logic for products: cache-aside reads, invalidation on writes.
 */
class ProductService {
  /**
   * Get all active products with caching (5-minute TTL, max 100 results).
   * @param {Object} filters - optional { category, minPrice, maxPrice }
   */
  async getAll(filters = {}) {
    // Sort keys so { a, b } and { b, a } hash to the same cache entry —
    // JSON.stringify is property-order-sensitive.
    const normalized = {};
    for (const k of Object.keys(filters).sort()) normalized[k] = filters[k];
    const cacheKey = `products:list:${JSON.stringify(normalized)}`;
    return await cacheService.get(
      cacheKey,
      async () => {
        const query = { isActive: true };
        if (filters.category) query.category = filters.category;
        if (filters.minPrice) query.price = { $gte: filters.minPrice };
        if (filters.maxPrice) query.price = { ...query.price, $lte: filters.maxPrice };
        return await Product.find(query)
          .sort({ createdAt: -1 })
          .limit(100)
          .lean();
      },
      300 // 5 minutes TTL for list
    );
  }
  /**
   * Get a product by ID with caching.
   * @throws {Error} 'Product not found' when the ID does not exist
   */
  async getById(id) {
    const cacheKey = `products:${id}`;
    return await cacheService.get(
      cacheKey,
      async () => {
        const product = await Product.findById(id).lean();
        if (!product) {
          throw new Error('Product not found');
        }
        return product;
      },
      3600 // 1 hour TTL for individual products
    );
  }
  /**
   * Create a new product; any cached list may now be stale.
   */
  async create(productData) {
    const product = await Product.create(productData);
    // Invalidate list cache
    await cacheService.delPattern('products:list:*');
    return product;
  }
  /**
   * Update a product and invalidate its caches.
   * NOTE(review): consider { runValidators: true } — updates currently
   * bypass schema validation; confirm before changing behavior.
   * @throws {Error} 'Product not found' when the ID does not exist
   */
  async update(id, updates) {
    const product = await Product.findByIdAndUpdate(id, updates, { new: true });
    if (!product) {
      throw new Error('Product not found');
    }
    // Item entry and every list are now stale; drop them in parallel.
    await Promise.all([
      cacheService.del(`products:${id}`),
      cacheService.delPattern('products:list:*')
    ]);
    return product;
  }
  /**
   * Delete a product and invalidate its caches.
   * @throws {Error} 'Product not found' when the ID does not exist
   */
  async delete(id) {
    const product = await Product.findByIdAndDelete(id);
    if (!product) {
      throw new Error('Product not found');
    }
    // Independent invalidations run in parallel.
    await Promise.all([
      cacheService.del(`products:${id}`),
      cacheService.delPattern('products:list:*')
    ]);
    return product;
  }
}
module.exports = new ProductService();
Exercise: Create a product controller that uses the productService and implements the following endpoints:
- GET /api/products - List all products (with optional category filter)
- GET /api/products/:id - Get single product
- POST /api/products - Create new product
- PUT /api/products/:id - Update product
- DELETE /api/products/:id - Delete product
Part 1 Summary: We've built the foundation of our caching layer with Redis connection management, a reusable cache service implementing cache-aside pattern, Express middleware for automatic caching, and a product service with integrated cache invalidation. In Part 2, we'll add session management, rate limiting, and real-time features.