Redis & Advanced Caching

Database Query Caching

20 min Lesson 17 of 30

Database Query Caching

Database query caching reduces database load by storing query results in Redis. This technique dramatically improves application performance by serving cached results instead of executing expensive queries repeatedly.

Why Cache Database Queries?

Database queries are often the slowest part of web applications. Caching query results provides near-instant responses and reduces database server load.

Performance Impact: Cached queries can be 100x faster than database queries, reducing response times from 100ms to 1ms.

ORM-Level Caching

Implement caching at the ORM level so query results are cached without changing application code. The plugin below hooks Mongoose's find queries; a complete solution that covers all query types is shown later in this lesson.

// Mongoose plugin for Redis caching
const redis = require('redis');
const client = redis.createClient();

/**
 * Schema plugin that caches find() results in Redis for 5 minutes.
 * @param {Object} schema - Mongoose schema to attach the hooks to.
 */
function cachePlugin(schema) {
  // Build a stable key from the query conditions. getQuery() returns an
  // object, so it must be serialized — interpolating it directly into the
  // template literal would produce "query:[object Object]", making every
  // query share a single cache entry.
  const keyFor = (query) => `query:${JSON.stringify(query.getQuery())}`;

  // After a find executes, store its results.
  schema.post('find', async function(docs) {
    await client.setEx(keyFor(this), 300, JSON.stringify(docs));
  });

  // Before a find executes, look for a cached result.
  // NOTE: a pre('find') hook cannot short-circuit the database query in
  // Mongoose — the find still runs even on a cache hit. For truly
  // transparent caching, override Query.prototype.exec instead (see the
  // Mongoose Caching Implementation section later in this lesson).
  schema.pre('find', async function() {
    const cached = await client.get(keyFor(this));
    if (cached) {
      this._cachedResult = JSON.parse(cached);
    }
  });
}

UserSchema.plugin(cachePlugin);

Query Result Caching

Cache specific expensive queries with custom TTLs based on data freshness requirements.

const redis = require('redis');
const client = redis.createClient();

/**
 * Cache-aside helper: return the cached value for `key` when present,
 * otherwise run `queryFn`, store its result for `ttl` seconds, and return it.
 * @param {string} key - Redis key under which the result is stored.
 * @param {Function} queryFn - Async function that executes the database query.
 * @param {number} [ttl=300] - Cache lifetime in seconds.
 * @returns {Promise<*>} The cached or freshly queried result.
 */
async function getCachedQuery(key, queryFn, ttl = 300) {
  // Cache hit — no database round trip needed.
  const hit = await client.get(key);
  if (hit) {
    return JSON.parse(hit);
  }

  // Cache miss — run the real query, then store the result for next time.
  const fresh = await queryFn();
  await client.setEx(key, ttl, JSON.stringify(fresh));

  return fresh;
}

// Usage
const users = await getCachedQuery(
  'users:active',
  () => User.find({ active: true }),
  600 // 10 minutes
);
Tip: Use longer TTLs for rarely-changing data (categories, settings) and shorter TTLs for frequently-updated data (user counts, recent posts).

Cache Warming

Proactively populate cache with frequently accessed data before users request it.

// Cache warming on startup
async function warmCache() {
  console.log('Warming cache...');

  // Popular queries
  const queries = [
    { key: 'products:featured', fn: () => Product.find({ featured: true }) },
    { key: 'categories:all', fn: () => Category.find() },
    { key: 'users:top', fn: () => User.find().sort({ points: -1 }).limit(10) }
  ];

  // The queries are independent of one another, so run them in parallel
  // instead of awaiting each one sequentially.
  await Promise.all(
    queries.map(async ({ key, fn }) => {
      const data = await fn();
      await client.setEx(key, 3600, JSON.stringify(data));
    })
  );

  console.log('Cache warmed!');
}

// Run on server start. warmCache() returns a promise; without a .catch a
// failure during warm-up would surface as an unhandled rejection and crash
// the process on modern Node versions.
warmCache().catch((err) => console.error('Cache warming failed:', err));

N+1 Query Caching

The N+1 query problem occurs when fetching related data in loops. Caching can mitigate this issue.

// Without caching - the N+1 problem: one query for the posts, then one
// additional query per post for its author. (A distinct variable name is
// used here so both snippets can live in the same scope without a
// duplicate-const SyntaxError.)
const uncachedPosts = await Post.find();
for (const post of uncachedPosts) {
  post.author = await User.findById(post.authorId); // N queries!
}

// With caching
const posts = await Post.find();
const authorIds = [...new Set(posts.map(p => p.authorId))];

// Batch fetch with cache: each author comes from Redis when possible, and
// the remaining lookups run in parallel.
const authors = await Promise.all(
  authorIds.map(id => getCachedQuery(
    `user:${id}`,
    () => User.findById(id),
    3600
  ))
);

// Index authors by id. String(...) normalizes ObjectIds and the plain
// string ids that come back from JSON-cached documents to the same key.
const authorMap = Object.fromEntries(
  authors.map(a => [String(a._id), a])
);

posts.forEach(post => {
  post.author = authorMap[String(post.authorId)];
});
N+1 Solution: Combine DataLoader pattern with Redis caching for optimal performance with related data.

Cache Invalidation on Writes

Automatically invalidate cached queries when data changes to prevent stale data.

// Invalidate cache on model changes
UserSchema.post('save', async function(doc) {
  // Clear user-specific cache
  await client.del(`user:${doc._id}`);

  // Clear list caches that might include this user
  await client.del('users:active');
  await client.del('users:all');
});

UserSchema.post('remove', async function(doc) {
  await client.del(`user:${doc._id}`);
  await client.del('users:active');
});

/**
 * Delete every key matching a glob pattern. Uses SCAN (via scanIterator),
 * which walks the keyspace incrementally, instead of KEYS, which blocks the
 * Redis server while it examines every key and is unsafe in production.
 * @param {string} pattern - Glob pattern, e.g. 'user:*'.
 */
async function invalidatePattern(pattern) {
  for await (const key of client.scanIterator({ MATCH: pattern })) {
    await client.del(key);
  }
}

// Invalidate all user caches
await invalidatePattern('user:*');
Warning: The KEYS command can be slow on large datasets. Use SCAN for production environments or maintain a separate index of cache keys.

Mongoose Caching Implementation

Complete example of Mongoose query caching with automatic invalidation.

const mongoose = require('mongoose');
const redis = require('redis');
const client = redis.createClient();

// Opt-in flag: chain .cache(ttl) onto any query to enable caching for it.
mongoose.Query.prototype.cache = function(ttl = 300) {
  this._cache = true;
  this._cacheTTL = ttl;
  return this;
};

const exec = mongoose.Query.prototype.exec;

mongoose.Query.prototype.exec = async function() {
  if (!this._cache) {
    return exec.apply(this, arguments);
  }

  // Key includes collection name, conditions, and options so that distinct
  // queries never collide on the same cache entry.
  const key = JSON.stringify({
    collection: this.mongooseCollection.name,
    query: this.getQuery(),
    options: this.getOptions()
  });

  // Check cache. Rebuild documents with Model.hydrate(): `new this.model(d)`
  // would flag them as new (isNew = true) and re-apply schema defaults, so
  // saving one would insert a duplicate instead of updating the original.
  const cached = await client.get(key);
  if (cached) {
    const doc = JSON.parse(cached);
    return Array.isArray(doc)
      ? doc.map(d => this.model.hydrate(d))
      : this.model.hydrate(doc);
  }

  // Execute query
  const result = await exec.apply(this, arguments);

  // Cache result
  await client.setEx(key, this._cacheTTL, JSON.stringify(result));

  return result;
};

// Usage
const users = await User
  .find({ active: true })
  .cache(600); // Cache for 10 minutes

Sequelize Caching Implementation

Implement query caching for Sequelize ORM with similar patterns.

const Sequelize = require('sequelize');
const redis = require('redis');
const client = redis.createClient();

/**
 * Run a Sequelize findAll through a Redis cache-aside layer.
 * @param {Object} model - Sequelize model to query.
 * @param {Object} query - findAll options (where, order, limit, ...).
 * @param {Object} [options] - { ttl: seconds, cacheKey: explicit key }.
 * @returns {Promise<Array>} Cached rows (plain objects) or fresh results.
 */
async function cachedQuery(model, query, options = {}) {
  const { ttl = 300, cacheKey } = options;

  // Default key embeds the model name plus the serialized query so that
  // different queries land in different cache entries.
  const key = cacheKey || `${model.name}:${JSON.stringify(query)}`;

  // Serve from cache when possible.
  const hit = await client.get(key);
  if (hit) {
    return JSON.parse(hit);
  }

  // Miss: hit the database, then populate the cache.
  const rows = await model.findAll(query);
  await client.setEx(key, ttl, JSON.stringify(rows));

  return rows;
}

// Usage
const users = await cachedQuery(
  User,
  { where: { active: true } },
  { ttl: 600, cacheKey: 'users:active' }
);
Tip: Include query parameters and options in cache keys to ensure different queries don't collide.

Smart Cache Invalidation

Implement intelligent cache invalidation based on data relationships.

class CacheManager {
  constructor(redisClient) {
    this.client = redisClient;
    // entity name -> list of cache keys that must be cleared whenever any
    // instance of that entity changes
    this.dependencies = new Map();
  }

  // Register the cache keys that depend on an entity type.
  addDependency(entity, cacheKeys) {
    this.dependencies.set(entity, cacheKeys);
  }

  // Invalidate every cache key affected by a change to one entity instance.
  async invalidateEntity(entity, id) {
    // Copy the registered keys before appending. Pushing onto the array
    // returned by Map.get() would mutate the stored dependency list,
    // permanently accumulating entity-specific keys on every invalidation.
    const keys = [...(this.dependencies.get(entity) || [])];

    // Add entity-specific key
    keys.push(`${entity}:${id}`);

    // keys always contains at least the entity-specific key, so delete
    // unconditionally.
    await this.client.del(keys);
  }
}

// Setup dependencies
const cache = new CacheManager(client);
cache.addDependency('User', ['users:all', 'users:active']);
cache.addDependency('Post', ['posts:recent', 'posts:featured']);

// Invalidate on changes
User.afterSave(async (user) => {
  await cache.invalidateEntity('User', user.id);
});
Exercise: Create a caching system for a blog application. Cache post listings, individual posts, and author data. Implement automatic invalidation when posts are created, updated, or deleted. Add cache warming for the 10 most popular posts. Monitor cache hit rates and optimize TTLs based on data access patterns.