Node.js Integration Guide
This guide explains how to integrate the content system with Node.js applications. It covers setup, configuration, and common patterns for server-side usage.
Getting Started
Installation
Install the content system package:
npm install @inherent/content-system
# or with yarn
yarn add @inherent/content-system
# or with pnpm
pnpm add @inherent/content-system
Basic Setup
Create a basic content store in Node.js:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
// Create a content store with filesystem adapter
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: '/path/to/content',
}),
})
// Use the store
async function main() {
try {
// Read content
const content = await store.read('articles/welcome.md')
console.log('Content loaded:', content.metadata.title)
// Write content
await store.write('articles/new.md', {
data: '# New Article\n\nThis is a new article.',
contentType: 'text/markdown',
metadata: {
title: 'New Article',
createdAt: new Date(),
},
})
console.log('Content written successfully')
} catch (error) {
console.error('Error:', error)
}
}
main()
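Reads of missing URIs reject with a typed error. Assuming the error classes exported from @inherent/content-system/errors behave as the Express error handler later in this guide suggests, a small helper can treat "not found" as a normal outcome instead of a crash. A minimal sketch using the store created above:
import { ContentNotFoundError } from '@inherent/content-system/errors'

async function readOrNull(uri) {
  try {
    return await store.read(uri)
  } catch (error) {
    // Missing content is an expected case; anything else is a real failure
    if (error instanceof ContentNotFoundError) return null
    throw error
  }
}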
Configuration
Environment-Specific Configuration
Load configuration from environment variables:
import {
  createContentStore,
  createFileSystemAdapter,
} from '@inherent/content-system'
// Middleware factories used below (createCacheMiddleware is shown under Performance
// Optimization; createLoggingMiddleware is assumed to live in the same module)
import {
  createCacheMiddleware,
  createLoggingMiddleware,
} from '@inherent/content-system/middleware'
import path from 'path'
import dotenv from 'dotenv'
// Load environment variables
dotenv.config()
// Create configuration from environment variables
const config = {
contentPath: process.env.CONTENT_PATH || path.join(process.cwd(), 'content'),
cacheEnabled: process.env.CACHE_ENABLED === 'true',
watchEnabled: process.env.WATCH_ENABLED === 'true',
logLevel: process.env.LOG_LEVEL || 'info',
}
// Create store with environment configuration
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: config.contentPath,
createMissingDirectories: true,
}),
middleware: [
// Add middleware based on configuration
config.cacheEnabled && createCacheMiddleware({ ttl: 3600 }),
createLoggingMiddleware({ level: config.logLevel }),
].filter(Boolean), // Remove falsy values
})
// Export configured store
export default store
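For reference, a .env file matching the variables read above might look like this (the values are purely illustrative):
CONTENT_PATH=./content
CACHE_ENABLED=true
WATCH_ENABLED=false
LOG_LEVEL=info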
Different Node.js Environments
Configure for different environments:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
// Determine environment
const environment = process.env.NODE_ENV || 'development'
// Environment-specific configuration
const envConfig = {
development: {
contentPath: path.join(process.cwd(), 'content'),
cacheEnabled: false,
watchEnabled: true,
logLevel: 'debug',
},
production: {
contentPath: process.env.CONTENT_PATH || '/var/content',
cacheEnabled: true,
watchEnabled: false,
logLevel: 'warn',
},
test: {
contentPath: path.join(process.cwd(), 'test/fixtures/content'),
cacheEnabled: false,
watchEnabled: false,
logLevel: 'error',
},
}
// Get configuration for current environment
const config = envConfig[environment] || envConfig.development
// Create store with environment-specific configuration
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: config.contentPath,
}),
// Add other configuration based on environment
})
export default store
Working with the Filesystem
Content Directory Structure
Recommended directory structure:
content/
├── articles/
│ ├── welcome.md
│ └── getting-started.md
├── pages/
│ ├── about.md
│ └── contact.md
├── shared/
│ ├── header.md
│ └── footer.md
└── assets/
├── images/
└── documents/
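Content URIs mirror the paths under content/, so glob patterns map directly onto these directories. A quick sketch using store.list with a store configured as in the earlier examples (the listed URIs are illustrative):
async function listByArea() {
  // Only the articles directory
  const articles = await store.list('articles/*.md')
  console.log(articles) // e.g. ['articles/welcome.md', 'articles/getting-started.md']

  // Everything under the base path, including assets
  const everything = await store.list('**/*')
  console.log(`${everything.length} content items`)
}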
File Watching
Watch for file changes:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
// Create store with filesystem adapter
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
watchOptions: {
ignoreInitial: true,
ignorePermissionErrors: true,
},
}),
})
// Watch for changes to markdown files
const unsubscribe = store.watch('**/*.md', (uri, content, changeType) => {
console.log(`File ${changeType}: ${uri}`)
if (changeType === 'create' || changeType === 'update') {
console.log(`Title: ${content.metadata.title}`)
}
// Trigger any necessary actions
if (changeType === 'update') {
regeneratePages()
}
})
// Later, stop watching
process.on('SIGINT', () => {
console.log('Stopping file watcher...')
unsubscribe()
process.exit()
})
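Watchers often fire several events in quick succession (for example when an editor writes a temporary file and then renames it). If regeneration is expensive, a small debounce avoids rerunning it for every event; a sketch using only standard timers, where regeneratePages() is the same application-specific hook used above:
let regenerateTimer = null

function scheduleRegenerate(delayMs = 250) {
  // Restart the timer on every change; run once the burst of events settles
  clearTimeout(regenerateTimer)
  regenerateTimer = setTimeout(() => {
    regeneratePages() // application-specific, as in the watcher above
  }, delayMs)
}

// Call scheduleRegenerate() instead of regeneratePages() inside the watch callback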
Path Handling
Handle paths correctly in Node.js:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
// Create store with normalized paths
const contentPath = path.resolve(process.cwd(), 'content')
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: contentPath,
}),
})
// Convert filesystem paths to content URIs
function filePathToContentUri(filePath) {
// Get relative path from content directory
const relativePath = path.relative(contentPath, filePath)
// Convert path separators to forward slashes (for cross-platform compatibility)
return relativePath.split(path.sep).join('/')
}
// Convert content URIs to filesystem paths
function contentUriToFilePath(uri) {
// Join with content path and convert to platform-specific separators
return path.join(contentPath, uri)
}
// Usage
const filePath = '/path/to/content/articles/welcome.md'
const uri = filePathToContentUri(filePath)
console.log('Content URI:', uri) // 'articles/welcome.md'
const fsPath = contentUriToFilePath('articles/welcome.md')
console.log('Filesystem path:', fsPath) // '/path/to/content/articles/welcome.md'
Server Integration
Express API
Create a content API with Express:
import express from 'express'
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import {
ContentNotFoundError,
ContentAccessError,
ContentValidationError,
} from '@inherent/content-system/errors'
import path from 'path'
// Create content store
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
})
// Create Express app
const app = express()
app.use(express.json())
// Content API routes
app.get('/api/content/:path(*)', async (req, res, next) => {
try {
const uri = req.params.path
const format = req.query.format
const content = await store.read(uri, {
format: format as string,
})
res.json({
uri,
data: content.data,
metadata: content.metadata,
contentType: content.contentType,
})
} catch (error) {
next(error)
}
})
app.put('/api/content/:path(*)', async (req, res, next) => {
try {
const uri = req.params.path
const { data, contentType, metadata } = req.body
await store.write(uri, { data, contentType, metadata })
res.json({
success: true,
uri,
})
} catch (error) {
next(error)
}
})
app.delete('/api/content/:path(*)', async (req, res, next) => {
try {
const uri = req.params.path
await store.delete(uri)
res.json({
success: true,
uri,
})
} catch (error) {
next(error)
}
})
// List content with pattern matching
app.get('/api/content', async (req, res, next) => {
try {
const pattern = (req.query.pattern as string) || '**/*'
const uris = await store.list(pattern)
res.json({
pattern,
uris,
})
} catch (error) {
next(error)
}
})
// Error handling middleware
app.use((error, req, res, next) => {
console.error('API Error:', error)
if (error instanceof ContentNotFoundError) {
return res.status(404).json({
error: 'NOT_FOUND',
message: `Content not found: ${error.uri}`,
uri: error.uri,
})
}
if (error instanceof ContentAccessError) {
return res.status(403).json({
error: 'ACCESS_DENIED',
message: `Access denied: ${error.uri}`,
uri: error.uri,
})
}
if (error instanceof ContentValidationError) {
return res.status(400).json({
error: 'VALIDATION_ERROR',
message: `Validation failed: ${error.message}`,
validationErrors: error.info?.validationErrors || [],
})
}
// Generic error
res.status(500).json({
error: 'INTERNAL_ERROR',
message: 'An internal server error occurred',
})
})
// Start server
const PORT = process.env.PORT || 3000
app.listen(PORT, () => {
console.log(`Content API server running on port ${PORT}`)
})
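The routes above can be exercised with any HTTP client. A sketch using the global fetch available in recent Node versions (18+); the host and port match the defaults above, so adjust them for your deployment:
async function demoClient() {
  const base = 'http://localhost:3000/api/content'

  // Read a content item (the ?format query parameter is optional)
  const readRes = await fetch(`${base}/articles/welcome.md`)
  const article = await readRes.json()
  console.log(article.metadata.title)

  // Create or update a content item
  await fetch(`${base}/articles/new.md`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      data: '# New Article',
      contentType: 'text/markdown',
      metadata: { title: 'New Article' },
    }),
  })

  // List markdown content by pattern (encode the glob for the query string)
  const listRes = await fetch(`${base}?pattern=${encodeURIComponent('**/*.md')}`)
  console.log((await listRes.json()).uris)
}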
Next.js Integration
Integrate with Next.js API routes:
// /pages/api/content/[...path].js
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import { ContentNotFoundError } from '@inherent/content-system/errors'
import path from 'path'
// Create store (consider moving to a singleton in a separate file)
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
})
export default async function handler(req, res) {
// Get content path from URL
const contentPath = req.query.path.join('/')
try {
switch (req.method) {
      case 'GET': {
        const content = await store.read(contentPath)
        res.status(200).json(content)
        break
      }
      case 'PUT': {
        const { data, contentType, metadata } = req.body
        await store.write(contentPath, { data, contentType, metadata })
        res.status(200).json({ success: true })
        break
      }
      case 'DELETE':
        await store.delete(contentPath)
        res.status(200).json({ success: true })
        break
default:
res.setHeader('Allow', ['GET', 'PUT', 'DELETE'])
res.status(405).end(`Method ${req.method} Not Allowed`)
}
} catch (error) {
if (error instanceof ContentNotFoundError) {
res.status(404).json({ error: 'Not found' })
} else {
console.error('API error:', error)
res.status(500).json({ error: 'Internal server error' })
}
}
}
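The same store can also feed pages at build time. A minimal sketch using getStaticProps to render the about page from the recommended directory structure; the '../lib/content-store' import is a hypothetical module that exports the configured store singleton:
// /pages/about.js
import store from '../lib/content-store' // hypothetical singleton module exporting the store

export async function getStaticProps() {
  const content = await store.read('pages/about.md')
  return {
    props: {
      title: content.metadata.title ?? 'About',
      body: content.data,
    },
  }
}

export default function AboutPage({ title, body }) {
  // Render the raw markdown; substitute your own markdown renderer as needed
  return (
    <main>
      <h1>{title}</h1>
      <pre>{body}</pre>
    </main>
  )
}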
WebSocket Integration
Real-time content updates with WebSockets:
import express from 'express'
import http from 'http'
import { Server as WebSocketServer } from 'socket.io'
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
// Create Express app and HTTP server
const app = express()
const server = http.createServer(app)
const io = new WebSocketServer(server)
// Create content store
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
})
// WebSocket connection
io.on('connection', socket => {
console.log('Client connected')
// Handle content read requests
socket.on('read', async (uri, callback) => {
try {
const content = await store.read(uri)
callback({ success: true, content })
} catch (error) {
callback({ success: false, error: error.message })
}
})
// Handle content write requests
socket.on('write', async (uri, content, callback) => {
try {
await store.write(uri, content)
callback({ success: true })
// Broadcast change to all clients
socket.broadcast.emit('content-changed', { uri, type: 'update' })
} catch (error) {
callback({ success: false, error: error.message })
}
})
// Handle content watch requests
socket.on('watch', pattern => {
// Add pattern to socket's watched patterns
socket.join(`watch:${pattern}`)
console.log(`Client watching: ${pattern}`)
})
// Handle content unwatch requests
socket.on('unwatch', pattern => {
socket.leave(`watch:${pattern}`)
console.log(`Client unwatching: ${pattern}`)
})
// Disconnect
socket.on('disconnect', () => {
console.log('Client disconnected')
})
})
// Set up content watching
store.watch('**/*', (uri, content, changeType) => {
  // Notify all clients watching patterns that match this URI.
  // Simplified here: every watcher room receives every event; in production,
  // match the URI against each watched pattern (e.g. with a glob library such as minimatch)
io.to(`watch:**/*`).emit('content-changed', {
uri,
type: changeType,
})
})
// Start server
const PORT = process.env.PORT || 3000
server.listen(PORT, () => {
console.log(`WebSocket server running on port ${PORT}`)
})
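On the client side, the handlers above map onto plain socket.io-client calls. A sketch assuming the default namespace and the acknowledgement shapes used by the server code above:
import { io } from 'socket.io-client'

const socket = io('http://localhost:3000')

// Read a content item via the acknowledgement callback
socket.emit('read', 'articles/welcome.md', response => {
  if (response.success) {
    console.log('Title:', response.content.metadata.title)
  } else {
    console.error('Read failed:', response.error)
  }
})

// Subscribe to change notifications
socket.emit('watch', '**/*.md')
socket.on('content-changed', ({ uri, type }) => {
  console.log(`Content ${type}: ${uri}`)
})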
Content Processing
MDX Processing
Process MDX content:
import {
  createContentStore,
  createFileSystemAdapter,
} from '@inherent/content-system'
// createTransformMiddleware is used below; it is assumed to come from the middleware module
import { createTransformMiddleware } from '@inherent/content-system/middleware'
import { createMdxProcessor } from '@inherent/content-system/mdx'
import path from 'path'
// Create MDX processor
const mdxProcessor = createMdxProcessor({
remarkPlugins: [],
rehypePlugins: [],
})
// Create store with MDX transformation
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
middleware: [
createTransformMiddleware({
transforms: [
{
// Apply to MDX files
match: uri => uri.endsWith('.mdx'),
// Transform on read
read: async content => {
// Process MDX to JSX
const { code, frontmatter } = await mdxProcessor.process(
content.data
)
return {
...content,
data: code,
contentType: 'application/javascript',
metadata: {
...content.metadata,
...frontmatter,
},
}
},
},
],
}),
],
})
// Usage
async function renderMdxPage(uri) {
// Get processed MDX
const content = await store.read(uri)
// The processed content contains compiled JSX ready for rendering
console.log('Compiled MDX:', content.data)
console.log('Frontmatter:', content.metadata)
return content
}
Image Processing
Process images in Node.js:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
import sharp from 'sharp'
// Create store
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
})
// Image processing function
async function processImage(inputUri, outputUri, options) {
// Read image content
const content = await store.read(inputUri)
// Ensure it's an image
if (!content.contentType.startsWith('image/')) {
throw new Error(`Not an image: ${inputUri}`)
}
// Process image with Sharp
const imageBuffer = Buffer.from(content.data, 'base64')
let processor = sharp(imageBuffer)
// Apply operations
if (options.resize) {
processor = processor.resize(options.resize.width, options.resize.height, {
fit: options.resize.fit || 'cover',
})
}
if (options.format) {
processor = processor.toFormat(options.format)
}
// Process and get result
const processedImageBuffer = await processor.toBuffer()
// Save processed image
await store.write(outputUri, {
data: processedImageBuffer.toString('base64'),
contentType: `image/${options.format || content.contentType.split('/')[1]}`,
metadata: {
...content.metadata,
processed: true,
processingOptions: options,
},
})
return outputUri
}
// Usage
async function createThumbnail(imageUri) {
const ext = path.extname(imageUri)
const baseName = path.basename(imageUri, ext)
const dir = path.dirname(imageUri)
const thumbnailUri = `${dir}/${baseName}-thumbnail${ext}`
return await processImage(imageUri, thumbnailUri, {
resize: {
width: 200,
height: 200,
fit: 'cover',
},
})
}
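The same helper can produce several variants of one image. A sketch that generates a few widths for responsive use; the width-suffix naming convention and the default sizes here are just examples:
async function createResponsiveVariants(imageUri, widths = [200, 400, 800]) {
  const ext = path.extname(imageUri)
  const baseName = path.basename(imageUri, ext)
  const dir = path.dirname(imageUri)

  const outputs = []
  for (const width of widths) {
    const outputUri = `${dir}/${baseName}-${width}${ext}`
    // Resize by width only; height follows the original aspect ratio
    await processImage(imageUri, outputUri, {
      resize: { width, fit: 'inside' },
    })
    outputs.push(outputUri)
  }
  return outputs
}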
Content Migration and Seeding
Content Seeding
Seed content in a Node.js application:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
import fs from 'fs/promises'
// Create store
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
createMissingDirectories: true,
}),
})
// Seed content from a directory
async function seedContentFromDirectory(sourceDir, targetPrefix = '') {
// Read all files in the source directory
const files = await fs.readdir(sourceDir, { withFileTypes: true })
for (const file of files) {
const sourcePath = path.join(sourceDir, file.name)
if (file.isDirectory()) {
// Recursively seed subdirectories
await seedContentFromDirectory(
sourcePath,
path.join(targetPrefix, file.name)
)
} else {
// Determine content type from extension
const ext = path.extname(file.name).toLowerCase()
const contentType = getContentType(ext)
// Read file data
const data = await fs.readFile(sourcePath, 'utf8')
// Extract metadata (example: front matter for markdown)
const metadata =
ext === '.md' || ext === '.mdx'
? extractFrontMatter(data)
: { title: path.basename(file.name, ext) }
      // Write to content store (normalize to forward slashes so URIs stay cross-platform)
      const targetUri = path.join(targetPrefix, file.name).split(path.sep).join('/')
await store.write(targetUri, {
data,
contentType,
metadata,
})
console.log(`Seeded: ${targetUri}`)
}
}
}
// Get content type from extension
function getContentType(ext) {
const contentTypes = {
'.md': 'text/markdown',
'.mdx': 'text/mdx',
'.json': 'application/json',
'.html': 'text/html',
'.txt': 'text/plain',
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.png': 'image/png',
'.gif': 'image/gif',
}
return contentTypes[ext] || 'application/octet-stream'
}
// Extract front matter from markdown
function extractFrontMatter(content) {
// Simplified example - in production use a proper front matter library
const frontMatterRegex = /^---\n([\s\S]*?)\n---/
const match = content.match(frontMatterRegex)
if (!match) return {}
try {
const frontMatterLines = match[1].split('\n')
const metadata = {}
for (const line of frontMatterLines) {
const [key, ...valueParts] = line.split(':')
const value = valueParts.join(':').trim()
if (key && value) {
metadata[key.trim()] = value
}
}
return metadata
} catch (error) {
console.error('Error parsing front matter:', error)
return {}
}
}
// Usage - seed content from a directory
async function main() {
try {
await seedContentFromDirectory(path.join(process.cwd(), 'seed-content'))
console.log('Content seeding complete')
} catch (error) {
console.error('Error seeding content:', error)
}
}
main()
Content Export
Export content to files:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
import fs from 'fs/promises'
// Create store
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
})
// Export content to files
async function exportContentToDirectory(pattern, exportDir) {
// Create export directory if it doesn't exist
await fs.mkdir(exportDir, { recursive: true })
// List content matching pattern
const uris = await store.list(pattern)
// Export each file
for (const uri of uris) {
try {
// Read content
const content = await store.read(uri)
// Create output directory structure
const outputPath = path.join(exportDir, uri)
await fs.mkdir(path.dirname(outputPath), { recursive: true })
// For Markdown content, add front matter
if (
content.contentType === 'text/markdown' ||
content.contentType === 'text/mdx'
) {
        const frontMatter = formatFrontMatter(content.metadata)
        // Avoid a stray leading newline when there is no front matter to prepend
        const output = frontMatter ? `${frontMatter}\n${content.data}` : content.data
        await fs.writeFile(outputPath, output)
} else {
// For other content, just write the data
await fs.writeFile(outputPath, content.data)
}
console.log(`Exported: ${uri} to ${outputPath}`)
} catch (error) {
console.error(`Error exporting ${uri}:`, error)
}
}
}
// Format metadata as YAML front matter
function formatFrontMatter(metadata) {
if (!metadata || Object.keys(metadata).length === 0) {
return ''
}
let frontMatter = '---\n'
for (const [key, value] of Object.entries(metadata)) {
// Skip internal metadata
if (key.startsWith('_')) continue
// Format value based on type
let formattedValue
if (value instanceof Date) {
formattedValue = value.toISOString()
} else if (typeof value === 'object') {
formattedValue = JSON.stringify(value)
} else {
formattedValue = String(value)
}
frontMatter += `${key}: ${formattedValue}\n`
}
frontMatter += '---'
return frontMatter
}
// Usage - export content to a directory
async function main() {
try {
await exportContentToDirectory(
'**/*.md',
path.join(process.cwd(), 'export')
)
console.log('Content export complete')
} catch (error) {
console.error('Error exporting content:', error)
}
}
main()
Testing in Node.js
Unit Testing
Test content store with Jest:
import {
createContentStore,
createMemoryAdapter,
} from '@inherent/content-system'
import path from 'path'
// Create a test store with memory adapter
function createTestStore(initialContent = {}) {
return createContentStore({
adapter: createMemoryAdapter({
initialContent,
}),
})
}
// Tests
describe('Content Store', () => {
test('reads and writes content', async () => {
// Create store with initial content
const store = createTestStore({
'test.md': {
data: '# Test',
contentType: 'text/markdown',
metadata: { title: 'Test' },
},
})
// Read content
const content = await store.read('test.md')
expect(content.data).toBe('# Test')
expect(content.metadata.title).toBe('Test')
// Write new content
const newContent = {
data: '# Updated Test',
contentType: 'text/markdown',
metadata: { title: 'Updated Test' },
}
await store.write('test.md', newContent)
// Verify updated content
const updated = await store.read('test.md')
expect(updated.data).toBe('# Updated Test')
expect(updated.metadata.title).toBe('Updated Test')
})
test('lists content with patterns', async () => {
// Create store with multiple content items
const store = createTestStore({
'articles/one.md': createTestContent('Article One'),
'articles/two.md': createTestContent('Article Two'),
'pages/about.md': createTestContent('About Page'),
})
// List all content
const allUris = await store.list('**/*')
expect(allUris).toHaveLength(3)
// List only articles
const articleUris = await store.list('articles/*.md')
expect(articleUris).toHaveLength(2)
expect(articleUris).toContain('articles/one.md')
expect(articleUris).toContain('articles/two.md')
// List only pages
const pageUris = await store.list('pages/*.md')
expect(pageUris).toHaveLength(1)
expect(pageUris).toContain('pages/about.md')
})
test('watches for content changes', async () => {
// Create store
const store = createTestStore()
// Set up watch handler
const changes = []
const unsubscribe = store.watch('**/*.md', (uri, content, changeType) => {
changes.push({ uri, changeType })
})
// Make changes
await store.write('test.md', createTestContent('Test'))
await store.write('test.md', createTestContent('Updated Test'))
await store.delete('test.md')
// Verify changes were captured
expect(changes).toHaveLength(3)
expect(changes[0]).toEqual({ uri: 'test.md', changeType: 'create' })
expect(changes[1]).toEqual({ uri: 'test.md', changeType: 'update' })
expect(changes[2]).toEqual({ uri: 'test.md', changeType: 'delete' })
// Clean up
unsubscribe()
})
})
// Helper function to create test content
function createTestContent(title) {
return {
data: `# ${title}\n\nThis is test content.`,
contentType: 'text/markdown',
metadata: { title },
}
}
Integration Testing
Test with filesystem adapter:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
import fs from 'fs/promises'
import os from 'os'
// Create a temporary test directory
async function createTempContentDir() {
const tempDir = path.join(os.tmpdir(), `content-test-${Date.now()}`)
await fs.mkdir(tempDir, { recursive: true })
return tempDir
}
// Clean up test directory
async function cleanupTempDir(tempDir) {
try {
await fs.rm(tempDir, { recursive: true, force: true })
} catch (error) {
console.warn(`Failed to clean up temp directory ${tempDir}:`, error)
}
}
// Tests
describe('Filesystem Adapter Integration', () => {
let tempDir
let store
// Set up before each test
beforeEach(async () => {
tempDir = await createTempContentDir()
store = createContentStore({
adapter: createFileSystemAdapter({
basePath: tempDir,
createMissingDirectories: true,
}),
})
})
// Clean up after each test
afterEach(async () => {
await cleanupTempDir(tempDir)
})
test('reads and writes to filesystem', async () => {
// Write content
const content = {
data: '# Test Content',
contentType: 'text/markdown',
metadata: {
title: 'Test Content',
createdAt: new Date(),
},
}
await store.write('test.md', content)
// Verify file was created
const filePath = path.join(tempDir, 'test.md')
const fileExists = await fs
.access(filePath)
.then(() => true)
.catch(() => false)
expect(fileExists).toBe(true)
// Read content back
const retrieved = await store.read('test.md')
expect(retrieved.data).toBe('# Test Content')
expect(retrieved.metadata.title).toBe('Test Content')
})
test('handles nested directories', async () => {
// Write content in nested directory
await store.write('articles/nested/test.md', {
data: '# Nested Test',
contentType: 'text/markdown',
metadata: { title: 'Nested Test' },
})
// Verify directory structure was created
const dirPath = path.join(tempDir, 'articles/nested')
const dirExists = await fs
.access(dirPath)
.then(() => true)
.catch(() => false)
expect(dirExists).toBe(true)
// List content
const articles = await store.list('articles/**/*.md')
expect(articles).toContain('articles/nested/test.md')
})
test('deletes content from filesystem', async () => {
// Write then delete content
await store.write('delete-test.md', {
data: '# Delete Test',
contentType: 'text/markdown',
metadata: { title: 'Delete Test' },
})
await store.delete('delete-test.md')
// Verify file was deleted
const filePath = path.join(tempDir, 'delete-test.md')
const fileExists = await fs
.access(filePath)
.then(() => true)
.catch(() => false)
expect(fileExists).toBe(false)
})
})
Performance Optimization
Caching
Implement content caching:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import { createCacheMiddleware } from '@inherent/content-system/middleware'
import path from 'path'
import NodeCache from 'node-cache'
// Create a Node.js cache
const cache = new NodeCache({
stdTTL: 600, // 10 minutes
checkperiod: 60, // Check for expired keys every 60 seconds
maxKeys: 1000, // Maximum number of items in cache
})
// Create store with caching middleware
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
middleware: [
createCacheMiddleware({
// Using the Node.js cache adapter
cache: {
get: async key => cache.get(key),
set: async (key, value, ttl) => cache.set(key, value, ttl),
delete: async key => cache.del(key),
clear: async () => cache.flushAll(),
},
// Cache settings
ttl: 3600, // 1 hour in seconds
keyPrefix: 'content:',
cacheMetadata: true,
}),
],
})
// Manually invalidate cache for specific content
async function invalidateCache(pattern) {
const uris = await store.list(pattern)
for (const uri of uris) {
const cacheKey = `content:${uri}`
cache.del(cacheKey)
}
console.log(`Cache invalidated for ${uris.length} items`)
}
// Cache monitoring
setInterval(() => {
const stats = cache.getStats()
console.log('Cache stats:', {
keys: cache.keys().length,
hits: stats.hits,
misses: stats.misses,
ksize: stats.ksize,
vsize: stats.vsize,
})
}, 60000) // Log stats every minute
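When file watching is enabled on the adapter, cached entries can also be evicted as soon as the underlying files change, rather than waiting for the TTL. A sketch built on store.watch and the same key scheme as invalidateCache above (the 'content:' prefix must match the middleware's keyPrefix):
// Drop the cached entry whenever the underlying file changes on disk
store.watch('**/*', (uri, content, changeType) => {
  cache.del(`content:${uri}`)
  console.log(`Cache entry evicted (${changeType}): ${uri}`)
})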
Stream Processing
Process large content with streams:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
import fs from 'fs'
import { Readable, Transform } from 'stream'
import { pipeline } from 'stream/promises'
// Create store
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
})
// Process large content with streams
async function processLargeContent(uri, outputUri, transformer) {
// Get content metadata without loading entire content
const { metadata, contentType } = await store.read(uri, {
// Custom option to get only metadata (would need to be implemented)
metadataOnly: true,
})
// Create read stream from file system directly
const filePath = path.join(process.cwd(), 'content', uri)
const readStream = fs.createReadStream(filePath)
// Create output file stream
const outputPath = path.join(process.cwd(), 'content', outputUri)
const outputDir = path.dirname(outputPath)
// Ensure output directory exists
await fs.promises.mkdir(outputDir, { recursive: true })
// Create write stream
const writeStream = fs.createWriteStream(outputPath)
// Process with transform stream
await pipeline(readStream, transformer, writeStream)
// Create processed content metadata
await store.write(
outputUri,
{
// Data is null - the file already exists on disk
data: null,
contentType,
metadata: {
...metadata,
processed: true,
processingDate: new Date(),
},
},
{
// Option to update metadata only (would need to be implemented)
metadataOnly: true,
}
)
return outputUri
}
// Example transformer for CSV processing
function createCsvTransformer(options) {
  let headers = null
  let remainder = ''

  // Transform one complete CSV line into an output line (or null to skip it)
  function transformLine(line) {
    if (line.trim() === '') return null
    const values = line.split(',').map(v => v.trim())
    // First non-empty line is the header row
    if (!headers) {
      headers = values
      // If we're adding new columns, add them to the headers
      if (options.addColumns) {
        headers = [...headers, ...Object.keys(options.addColumns)]
      }
      return headers.join(',') + '\n'
    }
    // Create a record object from the values
    const record = {}
    headers.forEach((header, index) => {
      record[header] = values[index] || ''
    })
    // Apply transformations
    if (options.transformRecord) {
      options.transformRecord(record)
    }
    // Add new columns
    if (options.addColumns) {
      Object.entries(options.addColumns).forEach(([key, value]) => {
        record[key] = typeof value === 'function' ? value(record) : value
      })
    }
    return headers.map(header => record[header] || '').join(',') + '\n'
  }

  return new Transform({
    transform(chunk, encoding, callback) {
      // Buffer any partial line left over from the previous chunk
      const lines = (remainder + chunk.toString()).split('\n')
      remainder = lines.pop()
      for (const line of lines) {
        const output = transformLine(line)
        if (output) this.push(output)
      }
      callback()
    },
    flush(callback) {
      // Process a trailing line that has no final newline
      const output = remainder ? transformLine(remainder) : null
      if (output) this.push(output)
      callback()
    },
  })
}
// Usage
async function processCsvFile() {
const transformer = createCsvTransformer({
transformRecord: record => {
// Example: convert to uppercase
if (record.name) {
record.name = record.name.toUpperCase()
}
},
addColumns: {
processed: 'yes',
processingDate: () => new Date().toISOString(),
},
})
await processLargeContent(
'data/large-data.csv',
'data/processed/large-data.csv',
transformer
)
console.log('CSV processing complete')
}
Concurrent Operations
Handle concurrent operations efficiently:
import {
createContentStore,
createFileSystemAdapter,
} from '@inherent/content-system'
import path from 'path'
import pLimit from 'p-limit'
// Create store
const store = createContentStore({
adapter: createFileSystemAdapter({
basePath: path.join(process.cwd(), 'content'),
}),
})
// Process multiple content items with concurrency control
async function processBatch(uris, processor, options = {}) {
const {
concurrency = 5,
skipErrors = false,
progressCallback = null,
} = options
// Create concurrency limiter
const limit = pLimit(concurrency)
// Track progress
let completed = 0
let succeeded = 0
let failed = 0
const errors = []
// Create async tasks
const tasks = uris.map(uri =>
limit(async () => {
try {
// Read content
const content = await store.read(uri)
// Process content
const processedContent = await processor(content, uri)
// Write processed content
if (processedContent) {
await store.write(uri, processedContent)
}
// Update progress
completed++
succeeded++
if (progressCallback) {
progressCallback({
uri,
completed,
total: uris.length,
succeeded,
failed,
percentage: Math.round((completed / uris.length) * 100),
})
}
return { uri, success: true }
} catch (error) {
// Handle error
completed++
failed++
const errorInfo = {
uri,
error: error.message,
stack: error.stack,
}
errors.push(errorInfo)
if (progressCallback) {
progressCallback({
uri,
completed,
total: uris.length,
succeeded,
failed,
percentage: Math.round((completed / uris.length) * 100),
error: error.message,
})
}
if (skipErrors) {
return { uri, success: false, error: error.message }
} else {
throw error
}
}
})
)
// Wait for all tasks to complete
const results = await Promise.all(tasks)
return {
total: uris.length,
succeeded,
failed,
errors,
results,
}
}
// Usage
async function processBatchExample() {
// Get all markdown files
const uris = await store.list('**/*.md')
// Define processor function
const processor = async (content, uri) => {
// Example: add a processing timestamp
return {
...content,
metadata: {
...content.metadata,
processedAt: new Date().toISOString(),
},
}
}
// Process batch with progress reporting
const results = await processBatch(uris, processor, {
concurrency: 3,
skipErrors: true,
progressCallback: progress => {
console.log(
`Processing ${progress.uri}: ${progress.percentage}% complete`
)
if (progress.error) {
console.error(`Error processing ${progress.uri}:`, progress.error)
}
},
})
console.log('Batch processing results:', {
total: results.total,
succeeded: results.succeeded,
failed: results.failed,
})
}