Caching Strategies

  • You need to improve query performance with caching
  • You want to understand cache invalidation strategies
  • You’re implementing stale-while-revalidate patterns
  • You need to cache expensive computations or external API calls
import { Cacheable, QueryHandler, QueryHandlerDecorator } from '@banyanai/platform-base-service';
import type { AuthenticatedUser } from '@banyanai/platform-core';

@Cacheable({
  ttl: 300, // Cache for 5 minutes
  key: (query: GetUserQuery) => `user_${query.userId}`,
  tags: ['users'],
  staleWhileRevalidate: true
})
@QueryHandlerDecorator(GetUserQuery)
export class GetUserHandler extends QueryHandler<GetUserQuery, GetUserResult> {
  async handle(query: GetUserQuery, user: AuthenticatedUser | null): Promise<GetUserResult> {
    // Results automatically cached - no infrastructure code needed!
    const userReadModel = await UserReadModel.findById<UserReadModel>(query.userId);
    if (!userReadModel) {
      return { success: false, error: 'User not found' };
    }

    return {
      success: true,
      user: this.mapReadModelToDto(userReadModel)
    };
  }
}

  • First call: executes the query and stores the result in the cache
  • Subsequent calls (within the TTL): return the cached result instantly
  • After the TTL expires: re-executes the query and updates the cache
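
Conceptually, the decorator wraps handle() in a check-execute-store flow. The sketch below illustrates that flow only; it is not the platform's actual implementation, and it assumes the cacheManager get/set API shown elsewhere on this page.

// Simplified sketch of the decorator's behavior (illustration only)
async function withCache<Q, R>(
  query: Q,
  options: { ttl: number; key: (q: Q) => string },
  execute: (q: Q) => Promise<R>
): Promise<R> {
  const cacheKey = options.key(query);

  // Cache hit: skip the handler entirely
  const cached = await cacheManager.get<R>(cacheKey);
  if (cached !== null) {
    return cached;
  }

  // Cache miss: run the real handler, then store the result for the TTL window
  const result = await execute(query);
  await cacheManager.set(cacheKey, result, options.ttl);
  return result;
}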

// All configuration options
@Cacheable({
  ttl: 300, // Time to live in seconds
  key: (query) => `cache_key_${query.id}`, // Cache key generator
  tags: ['users'], // Cache tags for invalidation
  staleWhileRevalidate: true // Return stale while updating
})

// Simple key derived from the query
@Cacheable({
  ttl: 300,
  key: (query: GetUserQuery) => `user_${query.userId}`
})

// Composite key for search queries
@Cacheable({
  ttl: 300,
  key: (query: SearchUsersQuery) =>
    `users_search_${query.searchTerm}_${query.filter?.isActive}_page${query.page}`
})

// User-scoped key (the key generator also receives the authenticated user)
@Cacheable({
  ttl: 300,
  key: (query: GetUserOrdersQuery, user: AuthenticatedUser) =>
    `orders_user${user.userId}_customer${query.customerId}`
})
// Cache with tags (tags shown as a function so the per-user tag can be derived
// from the query, like `key` - verify that your platform version supports dynamic tags)
@Cacheable({
  ttl: 300,
  key: (query: GetUserQuery) => `user_${query.userId}`,
  tags: (query: GetUserQuery) => ['users', `user:${query.userId}`]
})
// Invalidate all user caches
await cacheManager.invalidateTags(['users']);

// Invalidate specific user
await cacheManager.invalidateTags([`user:${userId}`]);

// Automatically invalidate cache when events occur
@EventHandlerDecorator(UserUpdatedEvent)
export class InvalidateUserCacheHandler extends EventHandler<UserUpdatedEvent, void> {
  async handle(event: UserUpdatedEvent): Promise<void> {
    await cacheManager.invalidateTags([
      'users',
      `user:${event.userId}`
    ]);
  }
}

Read-through: the cache is populated automatically on a miss.

@Cacheable({ ttl: 300, key: (q) => `user_${q.userId}` })
@QueryHandlerDecorator(GetUserQuery)
export class GetUserHandler extends QueryHandler<GetUserQuery, GetUserResult> {
  async handle(query: GetUserQuery, user: AuthenticatedUser | null): Promise<GetUserResult> {
    // Cache miss → execute query → store result → return
    // Cache hit → return cached result
    const userReadModel = await UserReadModel.findById<UserReadModel>(query.userId);
    return { success: true, user: this.mapReadModelToDto(userReadModel) };
  }
}

Keep the cache consistent on writes by invalidating the affected entries.

@CommandHandlerDecorator(UpdateUserCommand)
export class UpdateUserHandler extends CommandHandler<UpdateUserCommand, UpdateUserResult> {
  async handle(command: UpdateUserCommand, user: AuthenticatedUser | null): Promise<UpdateUserResult> {
    // Update aggregate
    const events = await eventStore.getEvents(command.userId);
    const userAggregate = User.fromEvents(events);
    userAggregate.updateProfile(command.profile);
    await eventStore.append(userAggregate.id, userAggregate.getUncommittedEvents());

    // Invalidate cache
    await cacheManager.invalidate(`user_${command.userId}`);

    return { success: true };
  }
}

Cache-aside: check the cache first and load from the read model on a miss.

async getUser(userId: string): Promise<UserDto | null> {
  // Check cache
  const cached = await cacheManager.get<UserDto>(`user_${userId}`);
  if (cached) {
    return cached;
  }

  // Load from read model
  const userReadModel = await UserReadModel.findById<UserReadModel>(userId);
  if (!userReadModel) {
    return null;
  }

  const userDto = this.mapReadModelToDto(userReadModel);

  // Store in cache
  await cacheManager.set(`user_${userId}`, userDto, 300);

  return userDto;
}

Stale-while-revalidate: return stale data immediately while refreshing it in the background.

@Cacheable({
  ttl: 300,
  key: (q) => `user_${q.userId}`,
  staleWhileRevalidate: true // Key setting
})
@QueryHandlerDecorator(GetUserQuery)
export class GetUserHandler extends QueryHandler<GetUserQuery, GetUserResult> {
  async handle(query: GetUserQuery, user: AuthenticatedUser | null): Promise<GetUserResult> {
    // If cache expired but has stale data:
    // 1. Return stale data immediately
    // 2. Refresh in background
    // 3. Update cache for next request
    const userReadModel = await UserReadModel.findById<UserReadModel>(query.userId);
    return { success: true, user: this.mapReadModelToDto(userReadModel) };
  }
}
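
For reference, the same flow can be written by hand. The following is a simplified sketch, not the platform's implementation; the CacheEntry wrapper, the double-length storage TTL, and the loadFresh callback are assumptions of this illustration, while UserDto and cacheManager follow their usage elsewhere on this page.

// Hand-rolled stale-while-revalidate (illustration only)
interface CacheEntry<T> {
  value: T;
  freshUntil: number; // epoch ms after which the value counts as stale
}

async function getUserStaleWhileRevalidate(
  userId: string,
  loadFresh: () => Promise<UserDto>,
  ttlSeconds = 300
): Promise<UserDto> {
  const key = `user_${userId}`;
  const entry = await cacheManager.get<CacheEntry<UserDto>>(key);

  const store = (value: UserDto) =>
    cacheManager.set(
      key,
      { value, freshUntil: Date.now() + ttlSeconds * 1000 },
      ttlSeconds * 2 // keep a stale copy around beyond the freshness window
    );

  if (entry && Date.now() < entry.freshUntil) {
    return entry.value; // fresh hit
  }

  if (entry) {
    // Stale hit: respond immediately, refresh in the background
    void loadFresh().then(store).catch(() => { /* keep serving the stale value */ });
    return entry.value;
  }

  // Miss: load synchronously, then cache
  const fresh = await loadFresh();
  await store(fresh);
  return fresh;
}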

For frequently changing data:

// Inventory levels change often
@Cacheable({
  ttl: 60, // 1 minute
  key: (q) => `inventory_${q.productId}`
})

For moderately stable data:

// User profiles change occasionally
@Cacheable({
  ttl: 300, // 5 minutes
  key: (q) => `user_${q.userId}`
})

For rarely changing data:

// Product catalog changes rarely
@Cacheable({
  ttl: 3600, // 1 hour
  key: (q) => `product_${q.productId}`
})

For data that only changes on specific events:

// Configuration - invalidate via the 'config' tag when a ConfigUpdated event arrives
@Cacheable({
  ttl: 86400, // 24 hours; expiry is a safety net - tag invalidation keeps it fresh
  key: (q) => `config_${q.key}`,
  tags: ['config']
})

Pre-populate cache for better performance.

// Warm cache on service startup
export class CacheWarmupService {
  async warmCache(): Promise<void> {
    // Load common data into cache
    const popularProducts = await ProductReadModel.findPopular();

    for (const product of popularProducts) {
      await cacheManager.set(
        `product_${product.id}`,
        product,
        3600
      );
    }

    Logger.info('Cache warmed', { products: popularProducts.length });
  }
}

Caching can live at several layers, from fastest to most durable:

// In-process memory cache (per instance)
const inMemoryCache = new Map<string, any>();

// Redis cache (shared across instances)
await redisClient.set(key, JSON.stringify(value), 'EX', ttl);

// PostgreSQL read model (persistent)
const user = await UserReadModel.findById(userId);
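
A read that falls through these layers might look like the sketch below. It is illustrative only: the layering and the standalone mapReadModelToDto helper are assumptions, while redisClient, UserReadModel, and the get/set calls mirror the snippets above.

// Illustrative fall-through across layers (not a platform API)
const localCache = new Map<string, UserDto>();

async function getUserAcrossLayers(userId: string, ttlSeconds = 300): Promise<UserDto | null> {
  // 1. In-process memory: fastest, but private to this instance
  const local = localCache.get(userId);
  if (local) {
    return local;
  }

  // 2. Redis: shared across instances
  const key = `user_${userId}`;
  const cached = await redisClient.get(key);
  if (cached) {
    const dto = JSON.parse(cached) as UserDto;
    localCache.set(userId, dto);
    return dto;
  }

  // 3. PostgreSQL read model: persistent source for queries
  const readModel = await UserReadModel.findById<UserReadModel>(userId);
  if (!readModel) {
    return null;
  }

  const dto = mapReadModelToDto(readModel); // mapping helper as used elsewhere on this page
  await redisClient.set(key, JSON.stringify(dto), 'EX', ttlSeconds);
  localCache.set(userId, dto);
  return dto;
}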

Unit tests can assert caching behavior with a mocked cache manager:

describe('GetUserHandler with caching', () => {
  let handler: GetUserHandler;
  let mockCache: jest.Mocked<CacheManager>;

  beforeEach(() => {
    mockCache = {
      get: jest.fn().mockResolvedValue(null),
      set: jest.fn().mockResolvedValue(undefined),
      invalidate: jest.fn().mockResolvedValue(undefined)
    } as any;

    // NOTE: how the mock replaces the platform's cache manager depends on your
    // dependency-injection setup; that wiring is omitted here.
    handler = new GetUserHandler();
  });

  it('should cache query results', async () => {
    const query = new GetUserQuery('user-123');

    // First call - cache miss
    await handler.handle(query, null);
    expect(mockCache.set).toHaveBeenCalledWith(
      'user_user-123',
      expect.any(Object),
      300
    );

    // Second call - cache hit
    mockCache.get.mockResolvedValue({ id: 'user-123', email: 'test@example.com' });
    const result = await handler.handle(query, null);

    expect(result.success).toBe(true);
    expect(mockCache.get).toHaveBeenCalledWith('user_user-123');
  });
});

Don’t cache everything

// DON'T DO THIS
@Cacheable({ ttl: 300, key: () => 'all_users' })
// This caches ALL users - huge memory waste!

Cache specific queries

// DO THIS
@Cacheable({ ttl: 300, key: (q) => `user_${q.userId}` })
// Only cache specific user lookups

Don’t use stale data for critical operations

// DON'T DO THIS
@Cacheable({ ttl: 3600, staleWhileRevalidate: true })
// For payment processing - always need fresh data!

No caching for critical operations

// DO THIS - No caching
async processPayment(command: ProcessPaymentCommand): Promise<PaymentResult> {
  // Always use fresh data for payments
}

Don’t forget invalidation

// DON'T DO THIS
// Update user without invalidating cache
await userRepository.update(userId, { email: newEmail });
// Cache still has old email!

Invalidate on updates

// DO THIS
await userRepository.update(userId, { email: newEmail });
await cacheManager.invalidate(`user_${userId}`);

Monitor cache effectiveness:

const hitRate = cacheHits / (cacheHits + cacheMisses) * 100;
// Target: 80%+ hit rate for effective caching
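
One way to collect those counters is a thin wrapper around cache reads. The wrapper below is a sketch, not a platform feature; it assumes the cacheManager.get API used earlier on this page.

// Illustrative hit/miss counters around cache reads (not a platform feature)
let cacheHits = 0;
let cacheMisses = 0;

async function getWithMetrics<T>(key: string): Promise<T | null> {
  const value = await cacheManager.get<T>(key);
  if (value !== null) {
    cacheHits++;
  } else {
    cacheMisses++;
  }
  return value;
}

function cacheHitRate(): number {
  const total = cacheHits + cacheMisses;
  return total === 0 ? 0 : (cacheHits / total) * 100; // percent
}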

Compare cached vs uncached:

// Uncached: 50-100ms
// Cached: 1-5ms
// Improvement: 10-100x faster