Capability Detection Patterns
This document outlines patterns for detecting and adapting to runtime environment capabilities in the ReX content system. These patterns enable the system to work across different environments with varying feature support.
Feature Detection Pattern
Pattern Overview
The Feature Detection pattern allows runtime detection of available APIs and features in the current environment.
Implementation Example
/**
* Environment utilities for capability detection
*/
// Environment type detection
export function getEnvironmentType():
| 'node'
| 'browser'
| 'serviceworker'
| 'unknown' {
if (isNode()) {
return 'node'
} else if (isServiceWorker()) {
return 'serviceworker'
} else if (isBrowser()) {
return 'browser'
}
return 'unknown'
}
// Node.js environment detection
export function isNode(): boolean {
return (
typeof process !== 'undefined' &&
process.versions != null &&
process.versions.node != null
)
}
// Browser environment detection
export function isBrowser(): boolean {
return typeof window !== 'undefined' && typeof document !== 'undefined'
}
// Service Worker environment detection
export function isServiceWorker(): boolean {
return (
typeof self !== 'undefined' &&
typeof self.registration !== 'undefined' &&
self.constructor &&
self.constructor.name === 'ServiceWorkerGlobalScope'
)
}
// Feature detection functions
// Check if Fetch API is available
export function hasFetchSupport(): boolean {
return (
typeof fetch !== 'undefined' &&
typeof Request !== 'undefined' &&
typeof Response !== 'undefined'
)
}
// Check if IndexedDB is available
export function hasIndexedDBSupport(): boolean {
return (
typeof indexedDB !== 'undefined' &&
typeof IDBDatabase !== 'undefined' &&
typeof IDBTransaction !== 'undefined'
)
}
// Check if LocalStorage is available
export function hasLocalStorageSupport(): boolean {
try {
if (typeof localStorage !== 'undefined') {
// Test with a simple operation to check for disabled localStorage
localStorage.setItem('__test', 'test')
localStorage.removeItem('__test')
return true
}
return false
} catch (e) {
// localStorage might be present but throw errors (private mode, quota)
return false
}
}
// Check if Web Workers are available
export function hasWebWorkerSupport(): boolean {
return typeof Worker !== 'undefined'
}
// Check if File System API is available
export function hasFileSystemSupport(): boolean {
return (
isNode() ||
(typeof self !== 'undefined' &&
typeof FileSystem !== 'undefined' &&
typeof FileSystemFileHandle !== 'undefined')
)
}
// Check if FileReader API is available
export function hasFileReaderSupport(): boolean {
return typeof FileReader !== 'undefined'
}
// Check for Promise support
export function hasPromiseSupport(): boolean {
return typeof Promise !== 'undefined'
}
// Check for async/await support (using Function constructor to avoid syntax errors)
export function hasAsyncAwaitSupport(): boolean {
try {
new Function('async () => {}')
return true
} catch (e) {
return false
}
}
// Comprehensive environment capabilities object
export function getEnvironmentCapabilities() {
return {
type: getEnvironmentType(),
features: {
fetch: hasFetchSupport(),
indexedDB: hasIndexedDBSupport(),
localStorage: hasLocalStorageSupport(),
webWorker: hasWebWorkerSupport(),
fileSystem: hasFileSystemSupport(),
fileReader: hasFileReaderSupport(),
promise: hasPromiseSupport(),
asyncAwait: hasAsyncAwaitSupport(),
},
}
}
Considerations
- Test each capability independently rather than making assumptions
- Include fallbacks for missing capabilities
- Cache capability detection results to avoid repeating expensive checks (see the memoization sketch after this list)
- Consider browser-specific quirks and inconsistencies
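The caching consideration above can be satisfied with a small memoization wrapper. A minimal sketch, assuming `getEnvironmentCapabilities` from the example above is exported from a hypothetical `./environment` module:

```typescript
import { getEnvironmentCapabilities } from './environment' // hypothetical module path

type EnvironmentCapabilities = ReturnType<typeof getEnvironmentCapabilities>

let cachedCapabilities: EnvironmentCapabilities | null = null

// Detect capabilities once and reuse the result on subsequent calls.
export function getCachedCapabilities(forceRefresh = false): EnvironmentCapabilities {
  if (cachedCapabilities === null || forceRefresh) {
    cachedCapabilities = getEnvironmentCapabilities()
  }
  return cachedCapabilities
}

// Invalidate the cache, e.g. after polyfills change the environment.
export function resetCapabilityCache(): void {
  cachedCapabilities = null
}
```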
Related Patterns
- Progressive Enhancement Pattern - Building up functionality based on capabilities
- Fallback Pattern - Providing alternatives when capabilities are missing
Capability Advertising Pattern
Pattern Overview
The Capability Advertising pattern communicates available features to system consumers for better decision-making.
Implementation Example
/**
* Component capability registration and querying
*/
// Capability registry to track component capabilities
class CapabilityRegistry {
private static instance: CapabilityRegistry
private capabilities: Map<string, Set<string>> = new Map()
// Singleton access
public static getInstance(): CapabilityRegistry {
if (!CapabilityRegistry.instance) {
CapabilityRegistry.instance = new CapabilityRegistry()
}
return CapabilityRegistry.instance
}
// Register component capabilities
public register(componentId: string, capabilities: string[]): void {
if (!this.capabilities.has(componentId)) {
this.capabilities.set(componentId, new Set())
}
const componentCapabilities = this.capabilities.get(componentId)!
capabilities.forEach(capability => componentCapabilities.add(capability))
}
// Check if component has a specific capability
public hasCapability(componentId: string, capability: string): boolean {
return (
this.capabilities.has(componentId) &&
this.capabilities.get(componentId)!.has(capability)
)
}
// Get all capabilities for a component
public getComponentCapabilities(componentId: string): string[] {
if (!this.capabilities.has(componentId)) {
return []
}
return Array.from(this.capabilities.get(componentId)!)
}
// Find components with specific capability
public findComponentsWithCapability(capability: string): string[] {
const components: string[] = []
this.capabilities.forEach((capabilities, componentId) => {
if (capabilities.has(capability)) {
components.push(componentId)
}
})
return components
}
// Clear registry (useful for testing)
public clear(): void {
this.capabilities.clear()
}
}
// ContentAdapter capability registration
function registerAdapterCapabilities(adapterId: string, adapter) {
const registry = CapabilityRegistry.getInstance()
const capabilities: string[] = []
// Core capabilities
capabilities.push('read')
capabilities.push('write')
// Optional capabilities
if (typeof adapter.list === 'function') {
capabilities.push('list')
}
if (typeof adapter.query === 'function') {
capabilities.push('query')
}
if (typeof adapter.watch === 'function') {
capabilities.push('watch')
}
if (typeof adapter.transaction === 'function') {
capabilities.push('transaction')
}
if (adapter.events && typeof adapter.events.on === 'function') {
capabilities.push('events')
}
// Content type capabilities
if (adapter.supportedContentTypes) {
adapter.supportedContentTypes.forEach(contentType => {
capabilities.push(`contentType:${contentType}`)
})
}
// Register detected capabilities
registry.register(adapterId, capabilities)
}
// ContentStore capability detection
function getStoreCapabilities(store) {
const capabilities = {
// Core operations
operations: {
read: true,
write: true,
delete: true,
list: typeof store.list === 'function',
query: typeof store.query === 'function',
watch: typeof store.watch === 'function',
},
// Content formats
formats: {
text: true,
json: true,
binary: store.supportsBinaryContent === true,
stream: typeof store.readStream === 'function',
},
// Advanced features
features: {
transactions: typeof store.transaction === 'function',
events: store.events && typeof store.events.on === 'function',
history: typeof store.getHistory === 'function',
versioning: typeof store.getVersion === 'function',
permissions: typeof store.checkPermission === 'function',
},
// Environment-specific capabilities
environment: getEnvironmentCapabilities(),
}
return capabilities
}
// Usage example for component selection based on capabilities
function selectAdapter(adapters, requiredCapabilities) {
const registry = CapabilityRegistry.getInstance()
// Filter adapters with all required capabilities
const compatibleAdapters = adapters.filter(adapter => {
return requiredCapabilities.every(capability =>
registry.hasCapability(adapter.id, capability)
)
})
// Sort by number of capabilities (most capable first)
return (
compatibleAdapters.sort((a, b) => {
const aCapabilities = registry.getComponentCapabilities(a.id).length
const bCapabilities = registry.getComponentCapabilities(b.id).length
return bCapabilities - aCapabilities
})[0] || null
)
}
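A usage sketch tying registration and selection together. The two adapters and their method signatures are hypothetical; only `registerAdapterCapabilities` and `selectAdapter` come from the example above:

```typescript
// Hypothetical adapters: any object exposing the optional methods probed above works.
const memoryAdapter = {
  id: 'memory',
  read: async (uri: string) => ({ uri, data: null }),
  write: async (uri: string, content: unknown) => {},
  list: async (pattern: string) => [] as string[],
}

const restAdapter = {
  id: 'rest',
  read: async (uri: string) => ({ uri, data: null }),
  write: async (uri: string, content: unknown) => {},
  query: async (filter: object) => [] as unknown[],
  watch: async (pattern: string, callback: (uri: string) => void) => () => {},
}

// Advertise what each adapter can do...
registerAdapterCapabilities('memory', memoryAdapter)
registerAdapterCapabilities('rest', restAdapter)

// ...then pick the most capable adapter that satisfies the requirements.
const adapter = selectAdapter([memoryAdapter, restAdapter], ['read', 'write', 'watch'])
console.log(adapter?.id) // 'rest' in this sketch
```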
Considerations
- Make capability registration explicit and discoverable
- Provide detailed capability information beyond boolean flags
- Use standardized capability identifiers for consistency (one possible set of shared constants is sketched after this list)
- Document the capability contract for component consumers
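One possible set of shared capability identifiers, as referenced above. The names are illustrative, not a fixed vocabulary of the system:

```typescript
// Central vocabulary of capability identifiers so components and consumers
// agree on spelling and granularity.
export const Capability = {
  Read: 'read',
  Write: 'write',
  List: 'list',
  Query: 'query',
  Watch: 'watch',
  Transaction: 'transaction',
  Events: 'events',
} as const

export type CapabilityId = (typeof Capability)[keyof typeof Capability]

// Helper for namespaced identifiers such as content types.
export function contentTypeCapability(contentType: string): string {
  return `contentType:${contentType}`
}
```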
Related Patterns
- Feature Registry Pattern - Centralized tracking of available features
- Adapter Factory Pattern - Creating adapters based on capabilities
Progressive Enhancement Pattern
Pattern Overview
The Progressive Enhancement pattern adds optional functionality when the environment supports it, without breaking the core behavior that works everywhere.
Implementation Example
/**
* Progressively enhanced content store
*/
// Create store with progressive enhancement
export function createProgressiveContentStore(options = {}) {
// Get environment capabilities
const capabilities = getEnvironmentCapabilities()
const store = createBaseContentStore(options)
// Enhance with persistence if available
if (capabilities.features.indexedDB) {
store.persist = async function () {
const persistAdapter = createIndexedDBAdapter({
storeName: options.storeName || 'content-store',
})
// Persist all content to IndexedDB
for (const [uri, content] of this.contentMap.entries()) {
await persistAdapter.write(uri, content)
}
return { success: true, count: this.contentMap.size }
}
} else if (capabilities.features.localStorage) {
store.persist = async function () {
const persistAdapter = createLocalStorageAdapter({
keyPrefix: options.keyPrefix || 'content:',
})
// Persist all content to localStorage
for (const [uri, content] of this.contentMap.entries()) {
await persistAdapter.write(uri, content)
}
return { success: true, count: this.contentMap.size }
}
} else {
// Fallback - persist to memory only
store.persist = async function () {
return { success: false, reason: 'No persistence mechanism available' }
}
}
// Add caching if available
if (capabilities.features.indexedDB || capabilities.features.localStorage) {
enhanceWithCaching(store, capabilities)
}
// Add offline support if in service worker
if (capabilities.type === 'serviceworker') {
enhanceWithOfflineSupport(store)
}
// Add file system integration if available
if (capabilities.features.fileSystem) {
enhanceWithFileSystemSupport(store)
}
// Add real-time sync if available
if (capabilities.features.webWorker) {
enhanceWithBackgroundSync(store)
}
return store
}
// Add caching enhancements
function enhanceWithCaching(store, capabilities) {
// Original read method
const originalRead = store.read.bind(store)
// Enhanced read with caching
store.read = async function (uri, options = {}) {
// Skip cache if requested
if (options.bypassCache) {
return originalRead(uri, options)
}
try {
// Try to read from cache first
const adapter = capabilities.features.indexedDB
? createIndexedDBAdapter({ storeName: 'content-cache' })
: createLocalStorageAdapter({ keyPrefix: 'content-cache:' })
const cachedContent = await adapter.read(uri)
// If cached content is found and not expired
if (cachedContent && !isCacheExpired(cachedContent.metadata)) {
return cachedContent
}
} catch (error) {
// Cache read failed, continue to original implementation
}
// Get fresh content
const content = await originalRead(uri, options)
// Update cache in background
try {
const adapter = capabilities.features.indexedDB
? createIndexedDBAdapter({ storeName: 'content-cache' })
: createLocalStorageAdapter({ keyPrefix: 'content-cache:' })
// Add cache metadata
const contentWithCacheMeta = {
...content,
metadata: {
...content.metadata,
cached: new Date().toISOString(),
expires: new Date(Date.now() + 3600000).toISOString(), // 1 hour
},
}
// Don't await to avoid blocking
adapter.write(uri, contentWithCacheMeta)
} catch (error) {
// Ignore cache write errors
}
return content
}
}
// Helper to check if cache is expired
function isCacheExpired(metadata) {
if (!metadata || !metadata.expires) {
return true
}
const expiresAt = new Date(metadata.expires).getTime()
return Date.now() > expiresAt
}
// Add offline support enhancements
function enhanceWithOfflineSupport(store) {
// Original operations
const originalWrite = store.write.bind(store)
// Enhanced write with offline queue
store.write = async function (uri, content, options = {}) {
try {
// Try normal write
return await originalWrite(uri, content, options)
} catch (error) {
// If offline or network failure, queue for later
if (isNetworkError(error)) {
await addToOfflineQueue({
operation: 'write',
uri,
content,
options,
timestamp: Date.now(),
})
return {
queued: true,
uri,
timestamp: Date.now(),
}
}
// Re-throw other errors
throw error
}
}
// Add sync operation
store.syncOfflineChanges = async function () {
const queue = await getOfflineQueue()
// Process queue
const results = {
total: queue.length,
succeeded: 0,
failed: 0,
errors: [],
}
for (const entry of queue) {
try {
if (entry.operation === 'write') {
await originalWrite(entry.uri, entry.content, entry.options)
} else if (entry.operation === 'delete') {
await store.delete(entry.uri, entry.options)
}
// Remove from queue
await removeFromOfflineQueue(entry)
results.succeeded++
} catch (error) {
results.failed++
results.errors.push({
entry,
error: error.message,
})
}
}
return results
}
// Register for sync events
if ('sync' in self.registration) {
self.addEventListener('sync', event => {
if (event.tag === 'content-sync') {
event.waitUntil(store.syncOfflineChanges())
}
})
}
}
// Helpers for offline queue management
async function addToOfflineQueue(entry) {
const adapter = createIndexedDBAdapter({ storeName: 'offline-queue' })
const queueKey = `queue:${Date.now()}:${entry.uri}`
await adapter.write(queueKey, entry)
}
async function getOfflineQueue() {
const adapter = createIndexedDBAdapter({ storeName: 'offline-queue' })
const keys = await adapter.list('queue:*')
const queue = []
for (const key of keys) {
try {
const entry = await adapter.read(key)
queue.push(entry)
} catch (error) {
// Skip failed entries
}
}
// Sort by timestamp
return queue.sort((a, b) => a.timestamp - b.timestamp)
}
async function removeFromOfflineQueue(entry) {
const adapter = createIndexedDBAdapter({ storeName: 'offline-queue' })
const keys = await adapter.list(`queue:*:${entry.uri}`)
for (const key of keys) {
try {
const queuedEntry = await adapter.read(key)
// Find matching entry
if (queuedEntry.timestamp === entry.timestamp) {
await adapter.delete(key)
}
} catch (error) {
// Ignore errors
}
}
}
// Check if error is network-related
function isNetworkError(error) {
return (
error.name === 'NetworkError' ||
error.message.includes('network') ||
error.message.includes('offline') ||
error.code === 'ENETUNREACH' ||
error.code === 'ENOTFOUND'
)
}
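A usage sketch for the factory above, assuming it is exported from a hypothetical `./progressive-store` module; the URI and options are illustrative:

```typescript
import { createProgressiveContentStore } from './progressive-store' // hypothetical path

async function main() {
  const store = createProgressiveContentStore({ storeName: 'notes' })

  await store.write('rex://notes/hello.md', { data: '# Hello', metadata: {} })

  // persist() always exists; whether it actually persists depends on the environment.
  const result = await store.persist()
  if (!result.success) {
    console.warn(`Running in-memory only: ${result.reason}`)
  }

  // syncOfflineChanges() is only attached when running inside a service worker.
  if (typeof store.syncOfflineChanges === 'function') {
    await store.syncOfflineChanges()
  }
}

main().catch(console.error)
```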
Considerations
- Always provide base functionality that works in all environments
- Add enhanced features in a non-breaking way
- Document which features are conditionally available
- Test in both fully-featured and minimally-supported environments
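To test the minimally-supported path, one option is to hide the optional globals before creating the store. A sketch, assuming a Node-based test runner; the helper name is illustrative:

```typescript
// Run a callback with selected globals hidden, then restore them.
// Setting a global to undefined is enough to defeat the typeof-based checks above;
// environments that expose these as getter-only properties need Object.defineProperty instead.
export async function withoutGlobals<T>(
  names: string[],
  fn: () => Promise<T> | T
): Promise<T> {
  const g = globalThis as Record<string, unknown>
  const saved = names.map(name => [name, g[name]] as const)
  for (const name of names) {
    g[name] = undefined
  }
  try {
    return await fn()
  } finally {
    for (const [name, value] of saved) {
      g[name] = value
    }
  }
}

// Example: force the memory-only fallback and assert on the persist() result.
// await withoutGlobals(['indexedDB', 'localStorage'], async () => {
//   const store = createProgressiveContentStore()
//   const result = await store.persist()
//   // expect(result.success).toBe(false)
// })
```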
Related Patterns
- Feature Detection Pattern - Determining available features
- Graceful Degradation Pattern - Maintaining functionality when features are missing
Graceful Degradation Pattern
Pattern Overview
The Graceful Degradation pattern ensures core functionality remains available when optional features are missing.
Implementation Example
/**
* Content adapter with graceful degradation
*/
// Create adapter with graceful degradation
export function createResilientAdapter(options = {}) {
// Detect environment capabilities
const capabilities = getEnvironmentCapabilities()
// Determine optimal implementation
let adapter
// Try primary adapter options in order of preference
if (isNode()) {
adapter = createFilesystemAdapter(options)
} else if (capabilities.features.indexedDB) {
adapter = createIndexedDBAdapter(options)
} else if (capabilities.features.localStorage) {
adapter = createLocalStorageAdapter(options)
} else {
// Fall back to memory adapter if no persistence available
adapter = createMemoryAdapter(options)
// Add warning about lack of persistence
console.warn(
'Content will not persist across page refreshes due to lack of ' +
'IndexedDB and localStorage support in this environment.'
)
}
// Wrap with graceful degradation logic
return wrapWithGracefulDegradation(adapter, capabilities)
}
// Wrap adapter with graceful degradation
function wrapWithGracefulDegradation(adapter, capabilities) {
// Create a new adapter with the same interface
const resilientAdapter = {
// Base operations always supported
read: adapter.read.bind(adapter),
write: adapter.write.bind(adapter),
delete: adapter.delete.bind(adapter),
// Events may be degraded
events: adapter.events || createDegradedEventEmitter(),
// Clean up resources
dispose: adapter.dispose ? adapter.dispose.bind(adapter) : async () => {},
}
// List operation (with graceful degradation)
resilientAdapter.list = adapter.list
? adapter.list.bind(adapter)
: async pattern => {
console.warn(
'List operation not supported by underlying adapter. ' +
'Returning empty result set.'
)
return []
}
// Watch operation (more complex degradation)
if (adapter.watch) {
// Full watch support
resilientAdapter.watch = adapter.watch.bind(adapter)
} else if (adapter.events) {
// Degraded watch implementation using events
resilientAdapter.watch = async (pattern, callback) => {
console.warn(
'Native watch not supported. Using event-based implementation ' +
'which only detects local changes.'
)
const listener = event => {
// Only call callback if URI matches pattern
if (matchesPattern(event.uri, pattern)) {
callback(event.uri, event.content, event.type)
}
}
// Listen for change events
adapter.events.on('change', listener)
// Return unwatch function
return () => {
adapter.events.off('change', listener)
}
}
} else {
// No watch support
resilientAdapter.watch = async (pattern, callback) => {
console.warn(
'Watch operation not supported in this environment. ' +
'Content changes will not be detected.'
)
// Return no-op unwatch function
return () => {}
}
}
// Transaction support (with degradation)
if (adapter.transaction) {
// Full transaction support
resilientAdapter.transaction = adapter.transaction.bind(adapter)
} else {
// Emulated transactions
resilientAdapter.transaction = async operations => {
console.warn(
'Native transactions not supported. Using emulated transactions ' +
'which are not atomic and may result in partial application.'
)
const results = []
try {
// Execute operations sequentially
for (const op of operations) {
let result
switch (op.type) {
case 'read':
result = await resilientAdapter.read(op.uri, op.options)
break
case 'write':
result = await resilientAdapter.write(
op.uri,
op.content,
op.options
)
break
case 'delete':
result = await resilientAdapter.delete(op.uri, op.options)
break
default:
throw new Error(`Unknown operation type: ${op.type}`)
}
results.push(result)
}
return results
} catch (error) {
console.error('Transaction failed:', error)
// No rollback mechanism in emulated transactions
throw new ContentError(
'Transaction failed and cannot be rolled back automatically',
{ cause: error }
)
}
}
}
return resilientAdapter
}
// Create degraded event emitter when native events not available
function createDegradedEventEmitter() {
console.warn(
'Native event system not available. Using limited event emitter ' +
'that only works within current page session.'
)
const listeners = new Map()
return {
on: (event, listener) => {
if (!listeners.has(event)) {
listeners.set(event, new Set())
}
listeners.get(event).add(listener)
// Return unsubscribe function
return () => {
const eventListeners = listeners.get(event)
if (eventListeners) {
eventListeners.delete(listener)
}
}
},
off: (event, listener) => {
const eventListeners = listeners.get(event)
if (eventListeners) {
eventListeners.delete(listener)
}
},
emit: (event, data) => {
const eventListeners = listeners.get(event)
if (eventListeners) {
for (const listener of eventListeners) {
try {
listener(data)
} catch (error) {
console.error('Error in event listener:', error)
}
}
}
},
removeAllListeners: () => {
listeners.clear()
},
}
}
// Helper to match URI against pattern
function matchesPattern(uri, pattern) {
// Convert glob pattern to regex, escaping regex metacharacters first
const regexPattern = pattern
.replace(/[.+^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*')
.replace(/\?/g, '.')
const regex = new RegExp(`^${regexPattern}$`)
return regex.test(uri)
}
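A usage sketch for the resilient adapter, assuming the factory above; the URIs and options are illustrative:

```typescript
async function demo() {
  const adapter = createResilientAdapter({ storeName: 'content' })

  await adapter.write('rex://docs/readme.md', { data: '# ReX', metadata: {} })

  // list() always exists; it may warn and return [] when the underlying adapter lacks it.
  const uris = await adapter.list('rex://docs/*')
  console.log(uris)

  // watch() always exists; the returned function stops watching (possibly a no-op).
  const unwatch = await adapter.watch(
    'rex://docs/*',
    (uri: string, content: unknown, type: string) => {
      console.log(`changed: ${uri} (${type})`)
    }
  )

  // ...later
  unwatch()
  await adapter.dispose()
}

demo().catch(console.error)
```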
Considerations
- Document degraded functionality clearly to users
- Provide appropriate warnings when using fallback implementations
- Ensure core operations (read, write, delete) always function
- Test degraded paths as thoroughly as enhanced paths
Related Patterns
- Feature Detection Pattern - Detecting available features
- Fallback Chain Pattern - Trying multiple implementations in sequence
Feature Registry Pattern
Pattern Overview
The Feature Registry pattern provides a central record of available features and their capabilities.
Implementation Example
/**
* Feature registry for system-wide capability tracking
*/
// Feature registry for tracking available functionality
export class FeatureRegistry {
private static instance: FeatureRegistry
private features: Map<string, any> = new Map()
private listeners: Map<string, Set<Function>> = new Map()
// Singleton access
public static getInstance(): FeatureRegistry {
if (!FeatureRegistry.instance) {
FeatureRegistry.instance = new FeatureRegistry()
}
return FeatureRegistry.instance
}
// Register a feature with metadata
public register(featureId: string, metadata: any = {}): void {
const isNew = !this.features.has(featureId)
// Add or update feature
this.features.set(featureId, {
id: featureId,
available: true,
...metadata,
registeredAt: new Date().toISOString(),
})
// Notify listeners
this.notifyListeners(featureId, isNew ? 'added' : 'updated')
}
// Unregister a feature
public unregister(featureId: string): void {
if (this.features.has(featureId)) {
this.features.delete(featureId)
this.notifyListeners(featureId, 'removed')
}
}
// Check if a feature is registered
public hasFeature(featureId: string): boolean {
return this.features.has(featureId)
}
// Get feature metadata
public getFeature(featureId: string): any {
return this.features.get(featureId) || null
}
// Get all registered features
public getAllFeatures(): Array<any> {
return Array.from(this.features.values())
}
// Get features matching a filter
public getFeaturesByFilter(filterFn: (feature: any) => boolean): Array<any> {
return this.getAllFeatures().filter(filterFn)
}
// Get features by category
public getFeaturesByCategory(category: string): Array<any> {
return this.getFeaturesByFilter(feature => feature.category === category)
}
// Listen for feature registry changes
public onChange(featureId: string, listener: Function): () => void {
if (!this.listeners.has(featureId)) {
this.listeners.set(featureId, new Set())
}
this.listeners.get(featureId)!.add(listener)
// Return unsubscribe function
return () => {
const listeners = this.listeners.get(featureId)
if (listeners) {
listeners.delete(listener)
}
}
}
// Notify feature listeners
private notifyListeners(featureId: string, event: string): void {
const listeners = this.listeners.get(featureId)
if (listeners) {
const feature = this.features.get(featureId)
listeners.forEach(listener => {
try {
listener(feature, event)
} catch (error) {
console.error('Error in feature registry listener:', error)
}
})
}
}
}
// Register core system features
function registerSystemFeatures() {
const registry = FeatureRegistry.getInstance()
const capabilities = getEnvironmentCapabilities()
// Register environment
registry.register('environment', {
type: capabilities.type,
category: 'system',
description: 'Runtime environment detection',
})
// Register storage capabilities
registry.register('storage.memory', {
available: true,
category: 'storage',
persistent: false,
description: 'In-memory content storage',
})
if (capabilities.type === 'node') {
registry.register('storage.filesystem', {
available: true,
category: 'storage',
persistent: true,
description: 'Filesystem content storage',
})
}
if (capabilities.features.indexedDB) {
registry.register('storage.indexedDB', {
available: true,
category: 'storage',
persistent: true,
description: 'IndexedDB content storage',
})
}
if (capabilities.features.localStorage) {
registry.register('storage.localStorage', {
available: true,
category: 'storage',
persistent: true,
quotaLimited: true,
description: 'LocalStorage content storage',
})
}
// Register content processing capabilities
registry.register('content.markdown', {
available: true,
category: 'content',
description: 'Markdown content processing',
})
registry.register('content.json', {
available: true,
category: 'content',
description: 'JSON content processing',
})
// Register optional capabilities conditionally
if (capabilities.features.webWorker) {
registry.register('sync.background', {
available: true,
category: 'sync',
description: 'Background content synchronization',
})
}
if (capabilities.type === 'serviceworker') {
registry.register('sync.offline', {
available: true,
category: 'sync',
description: 'Offline content synchronization',
})
}
}
// Feature capability reporting for consumers
export function getAvailableFeatures() {
const registry = FeatureRegistry.getInstance()
// If registry is empty, initialize
if (registry.getAllFeatures().length === 0) {
registerSystemFeatures()
}
return {
// Environment capabilities
environment: registry.getFeature('environment'),
// Storage capabilities
storage: {
memory: registry.hasFeature('storage.memory'),
filesystem: registry.hasFeature('storage.filesystem'),
indexedDB: registry.hasFeature('storage.indexedDB'),
localStorage: registry.hasFeature('storage.localStorage'),
persistent: registry
.getFeaturesByCategory('storage')
.some(feature => feature.persistent),
},
// Content capabilities
content: {
markdown: registry.hasFeature('content.markdown'),
json: registry.hasFeature('content.json'),
binary: registry.hasFeature('content.binary'),
},
// Sync capabilities
sync: {
background: registry.hasFeature('sync.background'),
offline: registry.hasFeature('sync.offline'),
},
// Full feature details
details: registry.getAllFeatures(),
}
}
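A usage sketch for the registry, assuming the class and helpers above; the feature identifier in the listener is illustrative:

```typescript
const registry = FeatureRegistry.getInstance()

// React when a specific feature is added, updated, or removed.
const unsubscribe = registry.onChange('storage.indexedDB', (feature: any, event: string) => {
  console.log(`storage.indexedDB ${event}`, feature)
})

// getAvailableFeatures() populates the registry on first use and returns a summary.
const features = getAvailableFeatures()
if (features.storage.persistent) {
  console.log('At least one persistent storage backend is available')
}

unsubscribe()
```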
Considerations
- Keep the registry up-to-date as features become available or unavailable
- Group related capabilities for easier consumption
- Provide both high-level flags and detailed capability information
- Allow runtime registration of features as they’re detected
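Runtime registration (the last consideration above) can hang off an asynchronous probe. A sketch using the Cache Storage API as an example; the feature identifier is illustrative:

```typescript
// Probe an optional API asynchronously and advertise it once it is confirmed to work.
async function detectCacheStorage(): Promise<void> {
  if (typeof caches === 'undefined') {
    return // not available in this environment
  }
  try {
    // Opening and discarding a cache confirms the API is actually usable,
    // not just present (it can fail in some private-browsing modes).
    await caches.open('rex-capability-probe')
    await caches.delete('rex-capability-probe')
    FeatureRegistry.getInstance().register('storage.cacheStorage', {
      category: 'storage',
      persistent: true,
      description: 'Cache Storage API content caching',
    })
  } catch {
    // Present but unusable; leave it unregistered.
  }
}
```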
Related Patterns
- Capability Advertising Pattern - Exposing capabilities to consumers
- Service Locator Pattern - Finding components by capability
Polyfill Pattern
Pattern Overview
The Polyfill Pattern provides alternative implementations for missing platform features.
Implementation Example
/**
* Polyfills for missing platform features
*/
// Check and polyfill Promise
export function ensurePromiseSupport() {
if (typeof Promise === 'undefined') {
// Simple Promise polyfill
console.log('Polyfilling Promise API')
// Implementation omitted for brevity
// In practice, use established polyfills like es6-promise
}
}
// Check and polyfill fetch
export function ensureFetchSupport() {
if (!hasFetchSupport()) {
console.log('Polyfilling Fetch API')
// Simple implementation for Node.js
if (isNode()) {
globalThis.fetch = async (url, options = {}) => {
const http = require('http')
const https = require('https')
return new Promise((resolve, reject) => {
const protocol = url.startsWith('https') ? https : http
const req = protocol.request(url, options, res => {
let data = ''
res.on('data', chunk => {
data += chunk
})
res.on('end', () => {
resolve({
status: res.statusCode,
statusText: res.statusMessage,
headers: res.headers,
text: () => Promise.resolve(data),
json: () => Promise.resolve(JSON.parse(data)),
})
})
})
req.on('error', reject)
if (options.body) {
req.write(options.body)
}
req.end()
})
}
globalThis.Request = class Request {
constructor(url, options = {}) {
this.url = url
this.options = options
}
}
globalThis.Response = class Response {
constructor(body, options = {}) {
this.body = body
this.status = options.status || 200
this.statusText = options.statusText || 'OK'
this.headers = options.headers || {}
}
text() {
return Promise.resolve(this.body.toString())
}
json() {
return Promise.resolve(JSON.parse(this.body.toString()))
}
}
} else {
// For browsers, use XMLHttpRequest
globalThis.fetch = (url, options = {}) => {
return new Promise((resolve, reject) => {
const xhr = new XMLHttpRequest()
xhr.open(options.method || 'GET', url)
if (options.headers) {
Object.entries(options.headers).forEach(([key, value]) => {
xhr.setRequestHeader(key, value.toString())
})
}
xhr.onload = () => {
const response = {
status: xhr.status,
statusText: xhr.statusText,
headers: parseXHRHeaders(xhr),
text: () => Promise.resolve(xhr.responseText),
json: () => Promise.resolve(JSON.parse(xhr.responseText)),
}
resolve(response)
}
xhr.onerror = () => {
reject(new Error('Network request failed'))
}
xhr.send(options.body)
})
}
// Add Request and Response classes
// Implementation omitted for brevity
}
}
}
// Check and polyfill EventTarget
export function ensureEventTargetSupport() {
if (typeof EventTarget === 'undefined') {
console.log('Polyfilling EventTarget')
// Simple EventTarget implementation
class EventTargetPolyfill {
constructor() {
this.listeners = {}
}
addEventListener(type, callback) {
if (!(type in this.listeners)) {
this.listeners[type] = []
}
this.listeners[type].push(callback)
}
removeEventListener(type, callback) {
if (!(type in this.listeners)) return
const stack = this.listeners[type]
const index = stack.indexOf(callback)
if (index !== -1) {
stack.splice(index, 1)
}
}
dispatchEvent(event) {
if (!(event.type in this.listeners)) return true
const stack = this.listeners[event.type].slice()
for (let i = 0; i < stack.length; i++) {
stack[i].call(this, event)
}
return !event.defaultPrevented
}
}
globalThis.EventTarget = EventTargetPolyfill
}
}
// Main polyfill application function
export function applyPolyfills() {
// Check environment capabilities
const capabilities = getEnvironmentCapabilities()
// Apply polyfills as needed
if (!capabilities.features.promise) {
ensurePromiseSupport()
}
if (!capabilities.features.fetch) {
ensureFetchSupport()
}
const hadEventTargetSupport = hasEventTargetSupport()
if (!hadEventTargetSupport) {
ensureEventTargetSupport()
}
// Update environment capabilities after polyfilling
const updatedCapabilities = getEnvironmentCapabilities()
return {
originalCapabilities: capabilities,
currentCapabilities: updatedCapabilities,
polyfilled: {
promise:
!capabilities.features.promise && updatedCapabilities.features.promise,
fetch: !capabilities.features.fetch && updatedCapabilities.features.fetch,
eventTarget: !hadEventTargetSupport && hasEventTargetSupport(),
},
}
}
// Check EventTarget support
function hasEventTargetSupport() {
return typeof EventTarget !== 'undefined'
}
// Helper for XHR headers
function parseXHRHeaders(xhr) {
const headerString = xhr.getAllResponseHeaders()
const headerLines = headerString.split('\r\n')
const headers = {}
headerLines.forEach(line => {
if (line) {
const parts = line.split(': ')
const key = parts.shift()
const value = parts.join(': ')
headers[key] = value
}
})
return headers
}
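A startup sketch, assuming `applyPolyfills` is exported from a hypothetical `./polyfills` module:

```typescript
import { applyPolyfills } from './polyfills' // hypothetical module path

// Apply polyfills once, before any other part of the system runs.
const { polyfilled, currentCapabilities } = applyPolyfills()

if (polyfilled.fetch) {
  console.log('Fetch API was polyfilled; behaviour may differ from native fetch')
}

if (!currentCapabilities.features.promise) {
  // Nothing below can work without Promises; fail fast with a clear message.
  throw new Error('This environment cannot run the ReX content system')
}
```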
Considerations
- Only polyfill features that are actually needed by your application
- Use established polyfill libraries when available for better reliability
- Clearly document which features are polyfilled and any limitations
- Test polyfilled functionality thoroughly across target environments
Related Patterns
- Feature Detection Pattern - Detecting missing features
- Shim Pattern - Intercepting API calls to normalize behavior
Shim Pattern
Pattern Overview
The Shim Pattern intercepts API calls to normalize behavior across different environments.
Implementation Example
/**
* Storage API shim for consistent behavior
*/
// Normalize storage API behavior
export function createStorageShim(storage) {
// Return normalized API
return {
// Get item with consistent error handling
getItem: key => {
try {
const value = storage.getItem(key)
// Normalize null/undefined handling
if (value === null || value === undefined) {
throw new Error('Item not found')
}
return value
} catch (error) {
// Normalize error
throw new StorageError(`Failed to get item "${key}"`, {
cause: error,
operation: 'get',
})
}
},
// Set item with consistent error handling
setItem: (key, value) => {
try {
// Ensure value is string
storage.setItem(key, String(value))
} catch (error) {
// Detect quota error
if (isQuotaError(error)) {
throw new StorageQuotaError(`Storage quota exceeded for "${key}"`, {
cause: error,
operation: 'set',
recoverable: false,
})
}
// Normalize other errors
throw new StorageError(`Failed to set item "${key}"`, {
cause: error,
operation: 'set',
})
}
},
// Remove item with consistent error handling
removeItem: key => {
try {
storage.removeItem(key)
} catch (error) {
// Normalize error
throw new StorageError(`Failed to remove item "${key}"`, {
cause: error,
operation: 'remove',
})
}
},
// Clear storage with consistent error handling
clear: () => {
try {
storage.clear()
} catch (error) {
// Normalize error
throw new StorageError('Failed to clear storage', {
cause: error,
operation: 'clear',
})
}
},
// Get all keys with consistent behavior
keys: () => {
try {
// Different implementations have different ways to get keys
if (typeof storage.keys === 'function') {
return storage.keys()
}
// For localStorage/sessionStorage
if (typeof storage.length === 'number') {
const keys = []
for (let i = 0; i < storage.length; i++) {
keys.push(storage.key(i))
}
return keys
}
// For Map-like storage
if (typeof storage.forEach === 'function') {
const keys = []
storage.forEach((_, key) => keys.push(key))
return keys
}
// For object-like storage
if (typeof storage === 'object') {
return Object.keys(storage)
}
throw new Error('Unsupported storage type')
} catch (error) {
// Normalize error
throw new StorageError('Failed to get storage keys', {
cause: error,
operation: 'keys',
})
}
},
// Get storage size with consistent behavior
size: () => {
try {
// Different implementations have different ways to get size
if (typeof storage.size === 'number') {
return storage.size
}
// For localStorage/sessionStorage
if (typeof storage.length === 'number') {
return storage.length
}
// Fall back to counting keys
if (typeof storage.keys === 'function') {
const keys = storage.keys()
return Array.isArray(keys) ? keys.length : Array.from(keys).length
}
return Object.keys(storage).length
} catch (error) {
// Normalize error
throw new StorageError('Failed to get storage size', {
cause: error,
operation: 'size',
})
}
},
// Check if key exists with consistent behavior
hasItem: key => {
try {
// Different implementations have different ways to check
if (typeof storage.has === 'function') {
return storage.has(key)
}
// For the standard Storage interface ('in' would also match methods like getItem)
return storage.getItem(key) !== null
} catch (error) {
// Normalize error
throw new StorageError(`Failed to check for item "${key}"`, {
cause: error,
operation: 'has',
})
}
},
}
}
// Helper to detect quota errors
function isQuotaError(error) {
return (
error.name === 'QuotaExceededError' ||
error.code === 22 || // Chrome's quota error code
error.message.includes('quota') ||
error.message.includes('storage full')
)
}
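The shim above assumes `StorageError` and `StorageQuotaError` classes. A minimal sketch of the shapes it relies on, plus a usage example; both class definitions are assumptions, not part of the system's error hierarchy:

```typescript
// Minimal error types matching how the shim constructs them.
export class StorageError extends Error {
  operation?: string
  cause?: unknown
  constructor(message: string, options: { cause?: unknown; operation?: string } = {}) {
    super(message)
    this.name = 'StorageError'
    this.cause = options.cause
    this.operation = options.operation
  }
}

export class StorageQuotaError extends StorageError {
  recoverable: boolean
  constructor(
    message: string,
    options: { cause?: unknown; operation?: string; recoverable?: boolean } = {}
  ) {
    super(message, options)
    this.name = 'StorageQuotaError'
    this.recoverable = options.recoverable ?? false
  }
}

// Usage: wrap whichever storage object the environment provides (browser shown here).
const storage = createStorageShim(localStorage)
storage.setItem('greeting', 'hello')
console.log(storage.hasItem('greeting')) // true
```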
Considerations
- Focus on normalizing error handling and edge cases
- Keep the API surface minimal and aligned with standards
- Document deviations from standard behavior
- Test across all target environments
Related Patterns
- Adapter Pattern - Converting between different interfaces
- Facade Pattern - Simplifying complex APIs