Intermediate · 25 min · Some Redis experience needed

Implementing a High-Performance Cache Layer

Create a robust caching layer to dramatically improve your application's performance by reducing database load and response times.

What You'll Learn
  • Cache-aside (lazy loading) pattern
  • Cache invalidation strategies
  • Cache warming and preloading
  • Monitoring cache hit rates
Prerequisites
  • Basic understanding of caching concepts
  • Familiarity with Redis data types
  • Database knowledge (PostgreSQL, MySQL, etc.)
Step 1: Create Cache Manager
Build a flexible cache manager with TTL support, pattern-based invalidation, and hit-rate tracking.
import { SolidisFeaturedClient } from '@vcms-io/solidis/featured';

export interface CacheOptions {
  ttl?: number; // Time to live in seconds
  prefix?: string;
}

export class CacheManager {
  private client: SolidisFeaturedClient;
  private prefix: string;
  private defaultTTL: number;
  private hits: number = 0;
  private misses: number = 0;

  constructor(options: {
    host?: string;
    port?: number;
    prefix?: string;
    defaultTTL?: number;
  } = {}) {
    this.client = new SolidisFeaturedClient({
      host: options.host || '127.0.0.1',
      port: options.port || 6379,
    });
    this.prefix = options.prefix || 'cache:';
    this.defaultTTL = options.defaultTTL || 3600; // 1 hour default
  }

  async connect(): Promise<void> {
    await this.client.connect();
  }

  async disconnect(): Promise<void> {
    await this.client.quit();
  }

  private getKey(key: string): string {
    return `${this.prefix}${key}`;
  }

  /**
   * Get value from cache
   */
  async get<T = any>(key: string): Promise<T | null> {
    const cacheKey = this.getKey(key);
    const value = await this.client.get(cacheKey);

    if (value) {
      this.hits++;
      return JSON.parse(value.toString()) as T;
    }

    this.misses++;
    return null;
  }

  /**
   * Set value in cache
   */
  async set<T = any>(
    key: string,
    value: T,
    options: CacheOptions = {}
  ): Promise<void> {
    const cacheKey = this.getKey(key);
    const ttl = options.ttl || this.defaultTTL;

    await this.client.set(
      cacheKey,
      JSON.stringify(value),
      { EX: ttl }
    );
  }

  /**
   * Cache-aside pattern: Get from cache or fetch from source
   */
  async getOrSet<T = any>(
    key: string,
    fetchFn: () => Promise<T>,
    options: CacheOptions = {}
  ): Promise<T> {
    // Try cache first
    const cached = await this.get<T>(key);
    if (cached !== null) {
      return cached;
    }

    // Cache miss - fetch from source
    const value = await fetchFn();
    await this.set(key, value, options);

    return value;
  }

  /**
   * Delete from cache
   */
  async delete(key: string): Promise<boolean> {
    const cacheKey = this.getKey(key);
    const result = await this.client.del(cacheKey);
    return result > 0;
  }

  /**
   * Delete multiple keys by pattern
   */
  async deletePattern(pattern: string): Promise<number> {
    const keys: string[] = [];
    let cursor = 0;

    do {
      const result = await this.client.scan(
        cursor,
        { MATCH: `${this.prefix}${pattern}`, COUNT: 100 }
      );
      cursor = result[0];
      keys.push(...result[1].map((k: Buffer) => k.toString()));
    } while (cursor !== 0);

    if (keys.length === 0) {
      return 0;
    }

    return await this.client.del(...keys);
  }

  /**
   * Check if key exists in cache
   */
  async exists(key: string): Promise<boolean> {
    const cacheKey = this.getKey(key);
    const result = await this.client.exists(cacheKey);
    return result === 1;
  }

  /**
   * Extend TTL of existing key
   */
  async touch(key: string, ttl?: number): Promise<boolean> {
    const cacheKey = this.getKey(key);
    const expiry = ttl || this.defaultTTL;
    const result = await this.client.expire(cacheKey, expiry);
    return result === 1;
  }

  /**
   * Get cache statistics
   */
  getStats() {
    const total = this.hits + this.misses;
    const hitRate = total > 0 ? (this.hits / total) * 100 : 0;

    return {
      hits: this.hits,
      misses: this.misses,
      total,
      hitRate: hitRate.toFixed(2) + '%',
    };
  }

  /**
   * Reset statistics
   */
  resetStats() {
    this.hits = 0;
    this.misses = 0;
  }
}
Step 2: Integrate with the Database
Example with a user repository pattern that caches reads and invalidates on writes.
import { CacheManager } from './cache-manager';

export interface User {
  id: string;
  username: string;
  email: string;
  name: string;
}

export class UserRepository {
  private cache: CacheManager;
  private db: any; // Your database client

  constructor(cache: CacheManager, db: any) {
    this.cache = cache;
    this.db = db;
  }

  /**
   * Get user by ID with caching
   */
  async getUserById(userId: string): Promise<User | null> {
    const cacheKey = `user:${userId}`;

    return await this.cache.getOrSet(
      cacheKey,
      async () => {
        // Fetch from database
        const user = await this.db.query(
          'SELECT * FROM users WHERE id = $1',
          [userId]
        );
        return user.rows[0] || null;
      },
      { ttl: 300 } // 5 minutes
    );
  }

  /**
   * Get multiple users with batch caching
   */
  async getUsersByIds(userIds: string[]): Promise<User[]> {
    const users: User[] = [];
    const uncachedIds: string[] = [];

    // Check cache for each user
    for (const userId of userIds) {
      const cacheKey = `user:${userId}`;
      const cached = await this.cache.get<User>(cacheKey);

      if (cached) {
        users.push(cached);
      } else {
        uncachedIds.push(userId);
      }
    }

    // Fetch uncached users from database
    if (uncachedIds.length > 0) {
      const result = await this.db.query(
        'SELECT * FROM users WHERE id = ANY($1)',
        [uncachedIds]
      );

      // Cache fetched users
      for (const user of result.rows) {
        users.push(user);
        await this.cache.set(
          `user:${user.id}`,
          user,
          { ttl: 300 }
        );
      }
    }

    return users;
  }

  /**
   * Update user with cache invalidation
   */
  async updateUser(userId: string, data: Partial<User>): Promise<User> {
    // Update database
    const result = await this.db.query(
      'UPDATE users SET username = $1, email = $2, name = $3 WHERE id = $4 RETURNING *',
      [data.username, data.email, data.name, userId]
    );

    const user = result.rows[0];

    // Invalidate cache
    await this.cache.delete(`user:${userId}`);

    // Optionally: warm cache with new data
    await this.cache.set(`user:${userId}`, user, { ttl: 300 });

    return user;
  }

  /**
   * Delete user with cache invalidation
   */
  async deleteUser(userId: string): Promise<boolean> {
    // Delete from database
    await this.db.query('DELETE FROM users WHERE id = $1', [userId]);

    // Invalidate cache
    await this.cache.delete(`user:${userId}`);

    return true;
  }

  /**
   * Search users (with result caching)
   */
  async searchUsers(query: string): Promise<User[]> {
    const cacheKey = `search:users:${query}`;

    return await this.cache.getOrSet(
      cacheKey,
      async () => {
        const result = await this.db.query(
          'SELECT * FROM users WHERE username ILIKE $1 OR email ILIKE $1 LIMIT 20',
          [`%${query}%`]
        );
        return result.rows;
      },
      { ttl: 60 } // 1 minute (shorter for search results)
    );
  }
}
Step 3: Cache Warming Strategy
Preload frequently accessed data so early requests hit the cache instead of the database.
import { CacheManager } from './cache-manager';
import type { User } from './user-repository';

export class CacheWarmer {
  private cache: CacheManager;
  private db: any;

  constructor(cache: CacheManager, db: any) {
    this.cache = cache;
    this.db = db;
  }

  /**
   * Warm cache with recently active (popular) users
   */
  async warmPopularUsers(limit: number = 100): Promise<void> {
    const result = await this.db.query(
      `SELECT * FROM users
       ORDER BY last_login DESC
       LIMIT $1`,
      [limit]
    );

    const promises = result.rows.map((user: User) =>
      this.cache.set(`user:${user.id}`, user, { ttl: 3600 })
    );

    await Promise.all(promises);
    console.log(`Warmed cache with ${result.rows.length} users`);
  }

  /**
   * Warm cache with featured content
   */
  async warmFeaturedContent(): Promise<void> {
    const result = await this.db.query(
      'SELECT * FROM posts WHERE featured = true'
    );

    const promises = result.rows.map((post: any) =>
      this.cache.set(`post:${post.id}`, post, { ttl: 7200 })
    );

    await Promise.all(promises);
    console.log(`Warmed cache with ${result.rows.length} featured posts`);
  }

  /**
   * Schedule periodic cache warming
   */
  startPeriodicWarming(intervalMs: number = 3600000): NodeJS.Timeout {
    return setInterval(async () => {
      try {
        await this.warmPopularUsers();
        await this.warmFeaturedContent();
        console.log('Cache warming completed');
      } catch (error) {
        console.error('Cache warming failed:', error);
      }
    }, intervalMs);
  }
}
Step 4: Complete Usage Example
Putting it all together in your application.
import express from 'express';
import { CacheManager } from './cache-manager';
import { UserRepository } from './user-repository';
import { CacheWarmer } from './cache-warmer';
import { Pool } from 'pg';

const app = express();
const db = new Pool({
  connectionString: process.env.DATABASE_URL,
});

const cache = new CacheManager({
  host: '127.0.0.1',
  port: 6379,
  prefix: 'myapp:',
  defaultTTL: 3600,
});

const userRepo = new UserRepository(cache, db);
const warmer = new CacheWarmer(cache, db);

app.use(express.json());

// Search users endpoint (with cached results)
// Registered before /api/users/:id so "search" is not captured as a user ID
app.get('/api/users/search', async (req, res) => {
  try {
    const query = req.query.q as string;

    if (!query) {
      return res.status(400).json({ error: 'Missing query parameter "q"' });
    }

    const users = await userRepo.searchUsers(query);
    res.json(users);
  } catch (error) {
    res.status(500).json({ error: 'Internal server error' });
  }
});

// Get user endpoint (with caching)
app.get('/api/users/:id', async (req, res) => {
  try {
    const user = await userRepo.getUserById(req.params.id);

    if (!user) {
      return res.status(404).json({ error: 'User not found' });
    }

    res.json(user);
  } catch (error) {
    res.status(500).json({ error: 'Internal server error' });
  }
});

// Update user endpoint (with cache invalidation)
app.put('/api/users/:id', async (req, res) => {
  try {
    const user = await userRepo.updateUser(req.params.id, req.body);
    res.json(user);
  } catch (error) {
    res.status(500).json({ error: 'Internal server error' });
  }
});

// Cache stats endpoint
app.get('/api/cache/stats', (req, res) => {
  const stats = cache.getStats();
  res.json(stats);
});

// Start server
async function start() {
  await cache.connect();

  // Warm cache on startup
  await warmer.warmPopularUsers(100);
  await warmer.warmFeaturedContent();

  // Schedule periodic warming
  warmer.startPeriodicWarming(3600000); // Every hour

  app.listen(3000, () => {
    console.log('Server running on http://localhost:3000');
  });
}

start().catch(console.error);
Performance Optimization Tips
  • Use appropriate TTL values
    Longer TTL for static data, shorter for frequently changing data
  • Implement cache stampede protection
    Use locks to prevent multiple simultaneous cache misses (see the first sketch after this list)
  • Monitor cache hit rates
    Aim for 80%+ hit rate for optimal performance
  • Use pipelining for batch operations
    Reduce network round trips when caching multiple items
  • Implement tiered caching
    Combine an in-memory cache (Node.js) with Redis for maximum performance (see the second sketch after this list)
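
One possible way to add stampede protection is an extra method on the CacheManager from Step 1, sketched below. It takes a short-lived lock key before recomputing a missed entry, so only one caller runs the expensive fetch while others briefly wait and retry. The method name and timings are illustrative, and it assumes the client's set options accept NX alongside EX (mirroring the { EX } shape used above); check your Solidis version's types before relying on that option shape.

  /**
   * Sketch: cache-aside with a per-key lock so only one caller recomputes
   * a missed entry; concurrent callers wait briefly and re-read the cache.
   */
  async getOrSetWithLock<T = any>(
    key: string,
    fetchFn: () => Promise<T>,
    options: CacheOptions = {},
    lockTtlSeconds = 10,
  ): Promise<T> {
    const cached = await this.get<T>(key);
    if (cached !== null) {
      return cached;
    }

    // Assumption: SET accepts { NX, EX } and returns a falsy result when the
    // lock key already exists (i.e. another caller is already recomputing).
    const lockKey = this.getKey(`lock:${key}`);
    const acquired = await this.client.set(lockKey, '1', {
      NX: true,
      EX: lockTtlSeconds,
    });

    if (acquired) {
      try {
        const value = await fetchFn();
        await this.set(key, value, options);
        return value;
      } finally {
        // Release the lock even if fetchFn throws.
        await this.client.del(lockKey);
      }
    }

    // Another caller holds the lock: wait a moment, then fall back to a
    // normal getOrSet (which will usually hit the freshly written entry).
    await new Promise((resolve) => setTimeout(resolve, 100));
    return this.getOrSet(key, fetchFn, options);
  }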
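
For the tiered-caching tip, a thin wrapper can keep very hot entries in an in-process Map in front of Redis. The sketch below wraps the Step 1 CacheManager; the TieredCache name and the short local TTL are illustrative choices, not part of the tutorial's code. Keep local TTLs short, because invalidations performed by other processes only clear Redis, not this process's Map.

import { CacheManager, CacheOptions } from './cache-manager';

// Illustrative two-level cache: an in-process Map (L1) in front of Redis (L2).
export class TieredCache {
  private local = new Map<string, { value: unknown; expiresAt: number }>();

  constructor(
    private remote: CacheManager,
    private localTtlMs: number = 5_000, // keep L1 entries very short-lived
  ) {}

  async get<T>(key: string): Promise<T | null> {
    const entry = this.local.get(key);
    if (entry && entry.expiresAt > Date.now()) {
      return entry.value as T; // L1 hit: no network round trip
    }
    this.local.delete(key);

    const value = await this.remote.get<T>(key); // L2: Redis
    if (value !== null) {
      this.local.set(key, { value, expiresAt: Date.now() + this.localTtlMs });
    }
    return value;
  }

  async set<T>(key: string, value: T, options: CacheOptions = {}): Promise<void> {
    this.local.set(key, { value, expiresAt: Date.now() + this.localTtlMs });
    await this.remote.set(key, value, options);
  }

  async delete(key: string): Promise<void> {
    this.local.delete(key);
    await this.remote.delete(key);
  }
}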
Cache Invalidation Strategies
  • Time-based (TTL): best for data that changes predictably over time
  • Event-based: best for data that changes based on user actions
  • Pattern-based: best for invalidating related cache entries
  • Write-through: best for data consistency requirements (see the sketch below)
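
As a concrete illustration of the write-through strategy, the write path updates the cache in the same operation as the database, so a successful write always leaves the cache consistent with the source of truth. The sketch below reuses the Step 1 CacheManager and the users table from Step 2; the function name is illustrative.

import { CacheManager } from './cache-manager';
import type { User } from './user-repository';

// Write-through sketch: every write updates the cache as part of the operation,
// instead of only deleting the stale entry and waiting for the next read.
export async function updateUserWriteThrough(
  cache: CacheManager,
  db: any, // same database client shape as in Step 2
  userId: string,
  data: Partial<User>,
): Promise<User> {
  // 1. Write to the source of truth first.
  const result = await db.query(
    'UPDATE users SET username = $1, email = $2, name = $3 WHERE id = $4 RETURNING *',
    [data.username, data.email, data.name, userId],
  );
  const user: User = result.rows[0];

  // 2. Write the fresh row into the cache before returning.
  await cache.set(`user:${userId}`, user, { ttl: 300 });

  return user;
}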