
Caching Middleware

The caching middleware provides performance optimization for content operations by storing operation results in memory or persistent storage. It reduces redundant operations and improves response times for frequently accessed content.

Overview

Caching middleware intercepts content operations, particularly reads, and checks whether the result is already cached before executing the underlying operation. When an operation does run, its result is stored in the cache for future requests, adding an acceleration layer without changing the semantics of the underlying operations.

typescript
import { withCaching } from '@lib/content/middleware'

// Create caching middleware
const cachingMiddleware = withCaching({
  ttl: 300000, // 5 minutes
  namespace: 'blog',
})

API Reference

withCaching

typescript
function withCaching(options?: CachingOptions): Middleware

Creates a new caching middleware instance.

Parameters:

  • options: Configuration options for caching

Returns:

  • A middleware function that caches operation results

CachingOptions

typescript
interface CachingOptions {
  /**
   * Time-to-live in milliseconds (default: 60000 - 1 minute)
   */
  ttl?: number

  /**
   * Cache namespace to prevent key collisions (default: 'content')
   */
  namespace?: string

  /**
   * Storage mechanism (default: 'memory')
   */
  storage?: 'memory' | 'localStorage' | 'sessionStorage' | 'indexedDB'

  /**
   * Maximum cache size in items (default: 100)
   */
  maxItems?: number

  /**
   * Eviction policy when capacity is reached (default: 'lru')
   */
  evictionPolicy?: 'lru' | 'fifo' | 'lfu'

  /**
   * Operations to cache (default: ['read', 'list'])
   */
  operations?: Array<'read' | 'list' | 'exists'>

  /**
   * Function to generate cache keys
   */
  keyGenerator?: (context: ContentContext) => string

  /**
   * Function to determine if result should be cached
   */
  shouldCache?: (result: any, context: ContentContext) => boolean

  /**
   * Compression settings for cache entries
   */
  compression?: {
    /**
     * Enable compression (default: false)
     */
    enabled: boolean

    /**
     * Minimum size in bytes to apply compression (default: 1024)
     */
    threshold?: number
  }

  /**
   * Cache invalidation config
   */
  invalidation?: {
    /**
     * Automatically invalidate cache on write operations (default: true)
     */
    invalidateOnWrite?: boolean

    /**
     * Automatically invalidate cache on delete operations (default: true)
     */
    invalidateOnDelete?: boolean

    /**
     * Function to generate invalidation patterns
     */
    patternGenerator?: (uri: string) => string
  }
}
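
For reference, a more fully configured middleware might look like the following; the key format and the predicates here are illustrative choices, not defaults:

typescript
import { withCaching } from '@lib/content/middleware'

const cachingMiddleware = withCaching({
  ttl: 600000, // 10 minutes
  namespace: 'docs',
  storage: 'memory',
  maxItems: 500,
  evictionPolicy: 'lfu',
  operations: ['read', 'list', 'exists'],
  // Illustrative: mirror the default key layout of namespace:operation:uri
  keyGenerator: context => `docs:${context.operation}:${context.uri}`,
  // Illustrative: never cache null or undefined results
  shouldCache: result => result != null,
  invalidation: {
    invalidateOnWrite: true,
    invalidateOnDelete: true,
  },
})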

Cache Implementation

The caching middleware uses a pluggable cache implementation based on the storage option:
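
Each implementation satisfies a common Cache interface. That interface is not reproduced on this page, but a minimal version consistent with the code below would be:

typescript
interface Cache {
  get(key: string): Promise<any>
  set(key: string, value: any, ttl?: number): Promise<void>
  delete(key: string): Promise<void>
  clear(): Promise<void>
}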

Memory Cache (Default)

typescript
class MemoryCache implements Cache {
  private entries = new Map<string, { value: any; expires: number }>()
  private capacity: number
  private evictionPolicy: 'lru' | 'fifo' | 'lfu'
  private accessCount = new Map<string, number>()
  private insertOrder: string[] = []

  constructor(options: {
    capacity?: number
    evictionPolicy?: 'lru' | 'fifo' | 'lfu'
  }) {
    this.capacity = options.capacity || 100
    this.evictionPolicy = options.evictionPolicy || 'lru'
  }

  async get(key: string): Promise<any> {
    const entry = this.entries.get(key)

    // Return undefined if not in cache or expired
    if (!entry || entry.expires < Date.now()) {
      if (entry) {
        this.entries.delete(key)
        this.accessCount.delete(key)
        this.insertOrder = this.insertOrder.filter(k => k !== key)
      }
      return undefined
    }

    // Update access count for LFU and LRU policies
    if (this.evictionPolicy === 'lfu' || this.evictionPolicy === 'lru') {
      this.accessCount.set(key, (this.accessCount.get(key) || 0) + 1)
    }

    // For LRU, move to end of insert order
    if (this.evictionPolicy === 'lru') {
      this.insertOrder = this.insertOrder.filter(k => k !== key)
      this.insertOrder.push(key)
    }

    return entry.value
  }

  async set(key: string, value: any, ttl?: number): Promise<void> {
    // Check if we need to evict an entry
    if (this.entries.size >= this.capacity && !this.entries.has(key)) {
      this.evictEntry()
    }

    // Calculate expiry time
    const expires = ttl ? Date.now() + ttl : Number.MAX_SAFE_INTEGER

    // Store entry
    this.entries.set(key, { value, expires })

    // Initialize access count
    this.accessCount.set(key, 0)

    // Track ordering: for LRU, writing a key counts as using it,
    // so move an existing key to the end of the order
    if (this.evictionPolicy === 'lru') {
      this.insertOrder = this.insertOrder.filter(k => k !== key)
      this.insertOrder.push(key)
    } else if (!this.insertOrder.includes(key)) {
      this.insertOrder.push(key)
    }
  }

  async delete(key: string): Promise<void> {
    this.entries.delete(key)
    this.accessCount.delete(key)
    this.insertOrder = this.insertOrder.filter(k => k !== key)
  }

  async clear(): Promise<void> {
    this.entries.clear()
    this.accessCount.clear()
    this.insertOrder = []
  }

  private evictEntry(): void {
    let keyToEvict: string | undefined

    switch (this.evictionPolicy) {
      case 'lru':
        // Least recently used: first item in insert order
        keyToEvict = this.insertOrder[0]
        break

      case 'fifo':
        // First in, first out: first item in insert order
        keyToEvict = this.insertOrder[0]
        break

      case 'lfu': {
        // Least frequently used: item with lowest access count
        let minCount = Number.MAX_SAFE_INTEGER
        for (const [key, count] of this.accessCount.entries()) {
          if (count < minCount) {
            minCount = count
            keyToEvict = key
          }
        }
        break
      }
    }

    if (keyToEvict) {
      this.delete(keyToEvict)
    }
  }
}
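
As a rough sketch of the eviction behavior (this assumes MemoryCache is exported, which may not be the case in the actual package):

typescript
const cache = new MemoryCache({ capacity: 2, evictionPolicy: 'lru' })

await cache.set('a', 1, 60000)
await cache.set('b', 2, 60000)
await cache.get('a') // 'a' is now the most recently used entry

await cache.set('c', 3, 60000) // capacity reached: 'b' is evicted, not 'a'

console.log(await cache.get('b')) // undefined
console.log(await cache.get('a')) // 1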

Local Storage Cache

typescript
class LocalStorageCache implements Cache {
  private prefix: string
  private capacity: number

  constructor(options: { namespace?: string; capacity?: number }) {
    this.prefix = `cache:${options.namespace || 'content'}:`
    this.capacity = options.capacity || 100
  }

  async get(key: string): Promise<any> {
    const cacheKey = this.prefix + key
    const json = localStorage.getItem(cacheKey)

    if (!json) {
      return undefined
    }

    try {
      const entry = JSON.parse(json)

      // Check if expired
      if (entry.expires && entry.expires < Date.now()) {
        localStorage.removeItem(cacheKey)
        return undefined
      }

      return entry.value
    } catch (error) {
      console.warn('Cache parse error:', error)
      localStorage.removeItem(cacheKey)
      return undefined
    }
  }

  async set(key: string, value: any, ttl?: number): Promise<void> {
    // Enforce capacity limit
    if (this.isAtCapacity()) {
      this.evictOldest()
    }

    const cacheKey = this.prefix + key
    const entry = {
      value,
      expires: ttl ? Date.now() + ttl : undefined,
      created: Date.now(),
    }

    try {
      localStorage.setItem(cacheKey, JSON.stringify(entry))
    } catch (error) {
      console.warn('Cache write error:', error)

      // If storage is full, evict some entries and try again
      if (
        error instanceof DOMException &&
        error.name === 'QuotaExceededError'
      ) {
        this.evictMultiple(5)
        try {
          localStorage.setItem(cacheKey, JSON.stringify(entry))
        } catch (retryError) {
          console.error('Cache write failed after eviction:', retryError)
        }
      }
    }
  }

  async delete(key: string): Promise<void> {
    const cacheKey = this.prefix + key
    localStorage.removeItem(cacheKey)
  }

  async clear(): Promise<void> {
    // Collect matching keys first: removing items while iterating
    // localStorage by index would skip entries
    const keys: string[] = []
    for (let i = 0; i < localStorage.length; i++) {
      const key = localStorage.key(i)
      if (key && key.startsWith(this.prefix)) {
        keys.push(key)
      }
    }
    for (const key of keys) {
      localStorage.removeItem(key)
    }
  }

  private isAtCapacity(): boolean {
    let count = 0
    for (let i = 0; i < localStorage.length; i++) {
      const key = localStorage.key(i)
      if (key && key.startsWith(this.prefix)) {
        count++
      }
    }
    return count >= this.capacity
  }

  private evictOldest(): void {
    let oldest: { key: string; created: number } | null = null

    for (let i = 0; i < localStorage.length; i++) {
      const key = localStorage.key(i)
      if (key && key.startsWith(this.prefix)) {
        try {
          const json = localStorage.getItem(key)
          if (json) {
            const entry = JSON.parse(json)
            if (!oldest || (entry.created && entry.created < oldest.created)) {
              oldest = { key, created: entry.created }
            }
          }
        } catch (error) {
          // If we can't parse an entry, it's a good candidate for eviction
          oldest = { key, created: 0 }
        }
      }
    }

    if (oldest) {
      localStorage.removeItem(oldest.key)
    }
  }

  private evictMultiple(count: number): void {
    const entries = []

    for (let i = 0; i < localStorage.length; i++) {
      const key = localStorage.key(i)
      if (key && key.startsWith(this.prefix)) {
        try {
          const json = localStorage.getItem(key)
          if (json) {
            const entry = JSON.parse(json)
            entries.push({
              key,
              created: entry.created || 0,
            })
          }
        } catch (error) {
          entries.push({
            key,
            created: 0,
          })
        }
      }
    }

    // Sort by creation time (oldest first)
    entries.sort((a, b) => a.created - b.created)

    // Evict the oldest entries
    for (let i = 0; i < Math.min(count, entries.length); i++) {
      localStorage.removeItem(entries[i].key)
    }
  }
}
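
The middleware implementation in the next section obtains its cache through a createCache helper keyed on the storage option. That helper is not shown in full on this page; a minimal sketch covering the two implementations above might look like this (the sessionStorage and indexedDB branches are omitted):

typescript
function createCache(options: {
  storage: 'memory' | 'localStorage' | 'sessionStorage' | 'indexedDB'
  namespace: string
  capacity: number
  evictionPolicy: 'lru' | 'fifo' | 'lfu'
}): Cache {
  switch (options.storage) {
    case 'localStorage':
      return new LocalStorageCache({
        namespace: options.namespace,
        capacity: options.capacity,
      })
    case 'memory':
    default:
      // sessionStorage and indexedDB are omitted in this sketch
      return new MemoryCache({
        capacity: options.capacity,
        evictionPolicy: options.evictionPolicy,
      })
  }
}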

Caching Process

The caching middleware follows these steps when processing content operations:

  1. Generate Cache Key: Create a unique key for the operation
  2. Check Cache: Look for cached results
  3. Return Early: Return cached results if found
  4. Execute Operation: Otherwise, execute the underlying operation
  5. Cache Result: Store operation result in cache
  6. Return Result: Return the operation result

typescript
// Simplified implementation
function withCaching(options: CachingOptions = {}): Middleware {
  const {
    ttl = 60000,
    namespace = 'content',
    operations = ['read', 'list'],
    storage = 'memory',
    maxItems = 100,
    evictionPolicy = 'lru',
  } = options

  // Create appropriate cache implementation
  const cache = createCache({
    storage,
    namespace,
    capacity: maxItems,
    evictionPolicy,
  })

  return async (context, next) => {
    // Skip caching for non-cacheable operations
    if (!operations.includes(context.operation)) {
      return next()
    }

    // Generate cache key
    const cacheKey = options.keyGenerator
      ? options.keyGenerator(context)
      : `${namespace}:${context.operation}:${context.uri}`

    // Check cache
    try {
      const cached = await cache.get(cacheKey)
      // Compare against undefined so cached falsy values (e.g. exists: false) are served
      if (cached !== undefined) {
        // Return cached result
        if (context.operation === 'read') {
          context.content = cached
        } else if (context.operation === 'list') {
          context.results = cached
        } else if (context.operation === 'exists') {
          context.exists = cached
        }

        return context
      }
    } catch (error) {
      // Log but continue with operation
      console.warn(`Cache read error for ${cacheKey}:`, error)
    }

    // Execute operation
    const result = await next()

    // Cache result
    try {
      let valueToCache

      if (context.operation === 'read') {
        valueToCache = context.content
      } else if (context.operation === 'list') {
        valueToCache = context.results
      } else if (context.operation === 'exists') {
        valueToCache = context.exists
      }

      // Check if result should be cached
      if (
        valueToCache !== undefined &&
        (!options.shouldCache || options.shouldCache(valueToCache, context))
      ) {
        await cache.set(cacheKey, valueToCache, ttl)
      }
    } catch (error) {
      // Log but don't fail the operation
      console.warn(`Cache write error for ${cacheKey}:`, error)
    }

    return result
  }
}
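
The shouldCache hook runs just before a result is stored, so it can filter out results that are not worth keeping. For example, a configuration that skips empty list results (illustrative, not a default) could be:

typescript
const cacheNonEmptyLists = withCaching({
  operations: ['read', 'list'],
  ttl: 120000, // 2 minutes
  shouldCache: (result, context) =>
    context.operation !== 'list' ||
    (Array.isArray(result) && result.length > 0),
})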

Cache Invalidation

The caching middleware supports automatic cache invalidation for write and delete operations:

typescript
// Cache invalidation for write operations
if (
  context.operation === 'write' &&
  options.invalidation?.invalidateOnWrite !== false
) {
  const uriPattern = options.invalidation?.patternGenerator
    ? options.invalidation.patternGenerator(context.uri)
    : context.uri

  await invalidateCache(cache, namespace, uriPattern)
}

// Cache invalidation for delete operations
if (
  context.operation === 'delete' &&
  options.invalidation?.invalidateOnDelete !== false
) {
  const uriPattern = options.invalidation?.patternGenerator
    ? options.invalidation.patternGenerator(context.uri)
    : context.uri

  await invalidateCache(cache, namespace, uriPattern)
}

// Helper function to invalidate cache entries
async function invalidateCache(
  cache: Cache,
  namespace: string,
  uriPattern: string
): Promise<void> {
  // Memory cache can use pattern invalidation
  if (cache instanceof MemoryCache) {
    cache.invalidatePattern(`${namespace}:*:${uriPattern}`)
    cache.invalidatePattern(`${namespace}:list:*`) // Invalidate list results
    return
  }

  // For other caches, invalidate exact key
  await cache.delete(`${namespace}:read:${uriPattern}`)
  await cache.delete(`${namespace}:exists:${uriPattern}`)

  // For localStorage and IndexedDB, list results are harder to invalidate selectively
  if (cache instanceof LocalStorageCache || cache instanceof IndexedDBCache) {
    // Instead of invalidating all list results, we could be smarter
    // For now, just invalidate common patterns
    await cache.delete(`${namespace}:list:*`)
    await cache.delete(`${namespace}:list:**`)
  }
}
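
The helper above assumes the memory cache exposes an invalidatePattern method, which the MemoryCache listing earlier on this page does not include. A minimal version supporting the '*' wildcard used here could look like:

typescript
// Added to MemoryCache: delete every entry whose key matches a
// glob-style pattern in which '*' matches any sequence of characters
invalidatePattern(pattern: string): void {
  const escaped = pattern
    .split('*')
    .map(part => part.replace(/[.+?^${}()|[\]\\]/g, '\\$&'))
    .join('.*')
  const regex = new RegExp(`^${escaped}$`)

  for (const key of [...this.entries.keys()]) {
    if (regex.test(key)) {
      this.delete(key)
    }
  }
}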

Content Compression

When compression is enabled, the caching middleware compresses cached content to reduce memory usage:

typescript
// Assumes the lz-string package is available for compression
import LZString from 'lz-string'

// Compress content before caching
async function compressContent(
  content: any,
  options: CachingOptions
): Promise<any> {
  // Only compress if enabled and content is large enough
  if (
    !options.compression?.enabled ||
    !content ||
    typeof content !== 'object'
  ) {
    return content
  }

  // Determine content size
  const contentString = JSON.stringify(content)
  const contentSize = contentString.length

  // Skip small content
  if (contentSize < (options.compression.threshold || 1024)) {
    return content
  }

  // Clone content to avoid modifying the original
  const cloned = JSON.parse(contentString)

  // Compress data field if it's a string
  if (cloned.data && typeof cloned.data === 'string') {
    try {
      // Use compression library like lz-string
      cloned.data = LZString.compress(cloned.data)
      cloned._compressed = true
    } catch (error) {
      console.warn('Content compression error:', error)
    }
  }

  return cloned
}

// Decompress content after retrieving from cache
function decompressContent(content: any): any {
  if (!content || typeof content !== 'object' || !content._compressed) {
    return content
  }

  // Clone to avoid modifying the cached value
  const cloned = { ...content }

  // Decompress data field
  if (cloned.data) {
    try {
      cloned.data = LZString.decompress(cloned.data)
    } catch (error) {
      console.warn('Content decompression error:', error)
    }
  }

  // Remove compression flag
  delete cloned._compressed

  return cloned
}
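
Enabling compression is purely a configuration concern. For example (values shown are illustrative):

typescript
const compressedCaching = withCaching({
  namespace: 'articles',
  ttl: 600000, // 10 minutes
  compression: {
    enabled: true,
    threshold: 2048, // only compress entries larger than ~2 KB
  },
})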

Performance Considerations

The caching middleware includes performance optimizations:

  • Selective Caching: Only caches specific operations by default
  • TTL Expiration: Automatically removes expired entries
  • Capacity Limiting: Prevents memory leaks with size constraints
  • Memory-efficient Storage: Options for different storage backends
  • Compression: Optional compression for large content
  • Pattern Invalidation: Smart invalidation of related cache entries

Advanced Usage

Tiered Caching

typescript
import { composeMiddleware } from '@lib/content/middleware'

// Create tiered caching with different TTLs
const tieredCaching = composeMiddleware(
  // Fast in-memory cache for hot data (1 minute)
  withCaching({
    ttl: 60000,
    namespace: 'memory',
    storage: 'memory',
    maxItems: 100,
  }),

  // Longer-lived localStorage cache (1 hour)
  withCaching({
    ttl: 3600000,
    namespace: 'persistent',
    storage: 'localStorage',
    maxItems: 500,
  })
)
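
Assuming composeMiddleware applies middleware in the order listed, the in-memory layer is consulted first; only on a miss does the request fall through to the localStorage layer, and both layers are populated once the underlying operation runs.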

Conditional Caching

typescript
import { withCaching, conditionalMiddleware } from '@lib/content/middleware'

// Only cache Markdown content
const markdownCache = conditionalMiddleware(
  context =>
    context.content?.contentType === 'text/markdown' ||
    context.uri.endsWith('.md'),
  withCaching({ namespace: 'markdown' })
)

// Different caching by content type
const contentTypeCache = composeMiddleware(
  conditionalMiddleware(
    context => context.content?.contentType === 'text/markdown',
    withCaching({ namespace: 'markdown', ttl: 300000 })
  ),
  conditionalMiddleware(
    context => context.content?.contentType === 'application/json',
    withCaching({ namespace: 'json', ttl: 120000 })
  )
)

Cache Preloading

typescript
// Preload cache with known content
async function preloadCache(adapter, cache) {
  // Get list of frequently accessed content
  const frequentUris = await getFrequentUris()

  // Preload each item
  for (const uri of frequentUris) {
    try {
      const content = await adapter.read(uri)
      await cache.set(`content:read:${uri}`, content, 3600000)
    } catch (error) {
      console.warn(`Failed to preload ${uri}:`, error)
    }
  }

  console.log(`Preloaded ${frequentUris.length} items into cache`)
}

Examples

Basic Caching

typescript
import { withCaching } from '@lib/content/middleware'
import { createContentStore } from '@lib/content'

// Create store with caching
const store = createContentStore({
  adapter: createFileSystemAdapter({ basePath: './content' }),
  middleware: [
    withCaching({ ttl: 300000 }), // 5 minutes
  ],
})

// First read will hit the filesystem
const content1 = await store.read('articles/welcome.md')

// Second read will return from cache (much faster)
const content2 = await store.read('articles/welcome.md')

Caching with Custom Storage

typescript
import { withCaching } from '@lib/content/middleware'
import { createMemoryAdapter } from '@lib/content/adapters'

// Create custom cache storage
class RedisCache implements Cache {
  private client
  private prefix

  constructor(options) {
    this.client = createRedisClient(options.redisUrl)
    this.prefix = options.namespace || 'content'
  }

  async get(key) {
    const cacheKey = `${this.prefix}:${key}`
    const json = await this.client.get(cacheKey)

    if (!json) {
      return undefined
    }

    const data = JSON.parse(json)

    // Check expiration
    if (data.expires && data.expires < Date.now()) {
      await this.client.del(cacheKey)
      return undefined
    }

    return data.value
  }

  async set(key, value, ttl) {
    const cacheKey = `${this.prefix}:${key}`
    const data = {
      value,
      expires: ttl ? Date.now() + ttl : null,
    }

    if (ttl) {
      await this.client.setex(
        cacheKey,
        Math.ceil(ttl / 1000),
        JSON.stringify(data)
      )
    } else {
      await this.client.set(cacheKey, JSON.stringify(data))
    }
  }

  async delete(key) {
    await this.client.del(`${this.prefix}:${key}`)
  }

  async clear() {
    const keys = await this.client.keys(`${this.prefix}:*`)
    if (keys.length > 0) {
      await this.client.del(keys)
    }
  }
}

// Register custom cache factory
registerCacheStorage('redis', options => new RedisCache(options))

// Use custom cache storage
const cachingMiddleware = withCaching({
  storage: 'redis',
  namespace: 'blog',
  ttl: 3600000,
  redisUrl: 'redis://localhost:6379',
})

// Create adapter with caching
const adapter = createMemoryAdapter()
const cachedAdapter = applyCaching(adapter, cachingMiddleware)
