Express.js Production Deployment and Performance
Express Developer • 16 December 2024 • Deployment
Tags: Express, Production, Deployment, Performance, Docker, Nginx
Deploying an Express.js application to a production environment requires proper preparation and tuning across configuration, security, performance, and operations.
Production Configuration
// config/production.js
module.exports = {
  app: {
    port: process.env.PORT || 3000,
    env: 'production'
  },
  database: {
    uri: process.env.MONGODB_URI,
    // Note: useNewUrlParser, useUnifiedTopology and bufferMaxEntries are
    // obsolete on Mongoose 6+ / MongoDB driver 4+ and have been dropped here
    options: {
      maxPoolSize: 10,                // maximum number of connections in the pool
      serverSelectionTimeoutMS: 5000,
      socketTimeoutMS: 45000
    }
  },
  redis: {
    host: process.env.REDIS_HOST || 'localhost',
    port: process.env.REDIS_PORT || 6379,
    password: process.env.REDIS_PASSWORD
  },
  jwt: {
    secret: process.env.JWT_SECRET,
    expiresIn: '1d'
  },
  cors: {
    origin: process.env.ALLOWED_ORIGINS?.split(',') || ['https://yourdomain.com'],
    credentials: true
  }
}
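The server bootstrap shown later in this post requires a `connectDB` helper from `config/database.js`, which the original code does not include. A minimal sketch of what that module might look like, assuming Mongoose and the config file above (the file name and structure are assumptions, not part of the original code):

// config/database.js (assumed helper used by server.js below)
const mongoose = require('mongoose')
const config = require('./production')
const { logger } = require('../utils/logger')

const connectDB = async () => {
  // Fail fast if the connection string is missing
  if (!config.database.uri) {
    throw new Error('MONGODB_URI is not set')
  }

  const conn = await mongoose.connect(config.database.uri, config.database.options)
  logger.info(`MongoDB connected: ${conn.connection.host}`)
  return conn
}

module.exports = connectDB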
Environment Variables
# .env.production
NODE_ENV=production
PORT=3000

# Database
MONGODB_URI=mongodb+srv://username:password@cluster.mongodb.net/database

# Redis
REDIS_HOST=redis.example.com
REDIS_PORT=6379
REDIS_PASSWORD=your-redis-password

# Security
JWT_SECRET=your-super-secret-jwt-key-with-minimum-32-characters
COOKIE_SECRET=your-cookie-secret

# CORS
ALLOWED_ORIGINS=https://yourdomain.com,https://www.yourdomain.com

# Rate Limiting
RATE_LIMIT_WINDOW_MS=900000
RATE_LIMIT_MAX_REQUESTS=100

# Logging
LOG_LEVEL=info
# Directory that the logger writes error.log / combined.log into
LOG_FILE_PATH=/var/log/app
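How this file gets loaded depends on the deployment: in Docker the variables are usually injected by the orchestrator, while on a plain VM a loader such as dotenv is common. A minimal sketch, assuming the dotenv package is installed and the file naming above:

// Top of server.js, before any other require that reads process.env.
// Optional when a container platform injects the variables directly.
require('dotenv').config({ path: `.env.${process.env.NODE_ENV || 'development'}` })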
Security Middleware
// middleware/security.js
const helmet = require('helmet')
const rateLimit = require('express-rate-limit')
const mongoSanitize = require('express-mongo-sanitize')
const xss = require('xss-clean')
const hpp = require('hpp')
const cors = require('cors')

// Rate limiter factory
const createRateLimiter = (windowMs, max, message) => {
  return rateLimit({
    windowMs,
    max,
    message: { success: false, message },
    standardHeaders: true,
    legacyHeaders: false
  })
}

// General rate limit
const generalLimiter = createRateLimiter(
  15 * 60 * 1000, // 15 minutes
  100,            // 100 requests per window
  'Too many requests, please try again later'
)

// Auth rate limit
const authLimiter = createRateLimiter(
  15 * 60 * 1000, // 15 minutes
  5,              // 5 login attempts per window
  'Too many login attempts, please try again later'
)

// API key rate limit
const apiLimiter = createRateLimiter(
  60 * 60 * 1000, // 1 hour
  1000,           // 1000 requests per hour
  'API rate limit exceeded'
)

const setupSecurity = (app) => {
  // Security headers
  app.use(helmet({
    crossOriginEmbedderPolicy: false,
    contentSecurityPolicy: {
      directives: {
        defaultSrc: ["'self'"],
        styleSrc: ["'self'", "'unsafe-inline'"],
        scriptSrc: ["'self'"],
        imgSrc: ["'self'", "data:", "https:"],
        connectSrc: ["'self'"],
        fontSrc: ["'self'"],
        objectSrc: ["'none'"],
        mediaSrc: ["'self'"],
        frameSrc: ["'none'"]
      }
    }
  }))

  // CORS
  app.use(cors({
    origin: (origin, callback) => {
      const allowedOrigins = process.env.ALLOWED_ORIGINS?.split(',') || []

      // Allow requests with no origin (mobile apps, Postman, etc.)
      if (!origin) return callback(null, true)

      if (allowedOrigins.indexOf(origin) !== -1) {
        callback(null, true)
      } else {
        callback(new Error('Not allowed by CORS'))
      }
    },
    credentials: true,
    methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'],
    allowedHeaders: ['Content-Type', 'Authorization', 'X-Requested-With']
  }))

  // Data sanitization
  app.use(mongoSanitize()) // NoSQL injection protection
  app.use(xss())           // XSS protection
  app.use(hpp())           // HTTP parameter pollution protection

  // Rate limiting
  app.use('/api/', generalLimiter)
  app.use('/api/auth/', authLimiter)
  app.use('/api/v1/', apiLimiter)
}

module.exports = { setupSecurity, authLimiter, apiLimiter }
Logging
// utils/logger.js
const winston = require('winston')
const path = require('path')

const { combine, timestamp, errors, json, printf, colorize } = winston.format

// Custom format for console output
const consoleFormat = printf(({ level, message, timestamp, stack }) => {
  return `${timestamp} [${level}]: ${stack || message}`
})

// Create logger
const logger = winston.createLogger({
  level: process.env.LOG_LEVEL || 'info',
  format: combine(
    timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
    errors({ stack: true }),
    json()
  ),
  defaultMeta: { service: 'express-api' },
  transports: [
    // File transport for errors
    new winston.transports.File({
      filename: path.join(process.env.LOG_FILE_PATH || './logs', 'error.log'),
      level: 'error',
      maxsize: 10000000, // 10MB
      maxFiles: 5
    }),
    // File transport for all logs
    new winston.transports.File({
      filename: path.join(process.env.LOG_FILE_PATH || './logs', 'combined.log'),
      maxsize: 10000000, // 10MB
      maxFiles: 5
    })
  ]
})

// Console transport for non-production environments
if (process.env.NODE_ENV !== 'production') {
  logger.add(new winston.transports.Console({
    format: combine(
      colorize(),
      timestamp({ format: 'HH:mm:ss' }),
      errors({ stack: true }),
      consoleFormat
    )
  }))
}

// HTTP request logging middleware
const httpLogger = (req, res, next) => {
  const start = Date.now()

  res.on('finish', () => {
    const duration = Date.now() - start
    const logData = {
      method: req.method,
      url: req.url,
      status: res.statusCode,
      duration: `${duration}ms`,
      ip: req.ip,
      userAgent: req.get('User-Agent')
    }

    if (res.statusCode >= 400) {
      logger.error('HTTP Request', logData)
    } else {
      logger.info('HTTP Request', logData)
    }
  })

  next()
}

module.exports = { logger, httpLogger }
Caching with Redis
// utils/cache.js
const redis = require('redis')
const { logger } = require('./logger')

class CacheService {
  constructor() {
    this.client = null
    this.isConnected = false
  }

  async connect() {
    try {
      // node-redis v4: connection details go under `socket`
      this.client = redis.createClient({
        socket: {
          host: process.env.REDIS_HOST,
          port: Number(process.env.REDIS_PORT) || 6379
        },
        password: process.env.REDIS_PASSWORD
      })

      this.client.on('error', (err) => {
        logger.error('Redis connection error:', err)
        this.isConnected = false
      })

      this.client.on('connect', () => {
        logger.info('Connected to Redis')
        this.isConnected = true
      })

      await this.client.connect()
    } catch (error) {
      logger.error('Failed to connect to Redis:', error)
    }
  }

  async get(key) {
    if (!this.isConnected) return null
    try {
      const value = await this.client.get(key)
      return value ? JSON.parse(value) : null
    } catch (error) {
      logger.error('Cache get error:', error)
      return null
    }
  }

  async set(key, value, ttl = 3600) {
    if (!this.isConnected) return false
    try {
      await this.client.setEx(key, ttl, JSON.stringify(value))
      return true
    } catch (error) {
      logger.error('Cache set error:', error)
      return false
    }
  }

  async del(key) {
    if (!this.isConnected) return false
    try {
      await this.client.del(key)
      return true
    } catch (error) {
      logger.error('Cache delete error:', error)
      return false
    }
  }

  async flushAll() {
    if (!this.isConnected) return false
    try {
      await this.client.flushAll()
      return true
    } catch (error) {
      logger.error('Cache flush error:', error)
      return false
    }
  }
}

const cache = new CacheService()

// Cache middleware: caches successful GET responses for `ttl` seconds
const cacheMiddleware = (ttl = 300) => {
  return async (req, res, next) => {
    if (req.method !== 'GET') {
      return next()
    }

    const key = `cache:${req.originalUrl}`
    const cached = await cache.get(key)

    if (cached) {
      logger.info('Cache hit', { key })
      return res.json(cached)
    }

    // Override res.json to cache the response body
    const originalJson = res.json
    res.json = function (data) {
      if (res.statusCode === 200) {
        cache.set(key, data, ttl)
        logger.info('Response cached', { key, ttl })
      }
      return originalJson.call(this, data)
    }

    next()
  }
}

module.exports = { cache, cacheMiddleware }
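As a usage illustration, the middleware can be applied per route, and write endpoints can invalidate the corresponding key. The route paths and the Post model below are assumptions made for the example, not part of the original code:

// routes/posts.js (illustrative usage, assumed routes and model)
const express = require('express')
const Post = require('../models/Post') // assumed Mongoose model
const { cache, cacheMiddleware } = require('../utils/cache')

const router = express.Router()

// Cache the post list for 5 minutes
router.get('/', cacheMiddleware(300), async (req, res) => {
  const posts = await Post.find().limit(20)
  res.json({ success: true, data: posts })
})

// Invalidate the cached list whenever a post is created
router.post('/', async (req, res) => {
  const post = await Post.create(req.body)
  await cache.del('cache:/api/posts') // key format matches cacheMiddleware
  res.status(201).json({ success: true, data: post })
})

module.exports = router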
Health Checks
// routes/health.js
const express = require('express')
const mongoose = require('mongoose')
const { cache } = require('../utils/cache')

const router = express.Router()

// Basic health check
router.get('/', (req, res) => {
  res.json({
    status: 'OK',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
    environment: process.env.NODE_ENV
  })
})

// Detailed health check
router.get('/detailed', async (req, res) => {
  const health = {
    status: 'OK',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
    environment: process.env.NODE_ENV,
    version: process.env.npm_package_version,
    checks: {
      database: 'OK',
      cache: 'OK',
      memory: 'OK'
    }
  }

  // Database check: readyState 1 means connected
  try {
    if (mongoose.connection.readyState !== 1) {
      health.checks.database = 'ERROR'
      health.status = 'ERROR'
    }
  } catch (error) {
    health.checks.database = 'ERROR'
    health.status = 'ERROR'
  }

  // Cache check (degraded but not fatal)
  try {
    if (!cache.isConnected) {
      health.checks.cache = 'WARNING'
      if (health.status === 'OK') health.status = 'WARNING'
    }
  } catch (error) {
    health.checks.cache = 'ERROR'
    health.status = 'ERROR'
  }

  // Memory check
  const memUsage = process.memoryUsage()
  const memUsagePercent = (memUsage.heapUsed / memUsage.heapTotal) * 100

  if (memUsagePercent > 90) {
    health.checks.memory = 'ERROR'
    health.status = 'ERROR'
  } else if (memUsagePercent > 75) {
    health.checks.memory = 'WARNING'
    if (health.status === 'OK') health.status = 'WARNING'
  }

  health.memory = {
    used: `${Math.round(memUsage.heapUsed / 1024 / 1024)}MB`,
    total: `${Math.round(memUsage.heapTotal / 1024 / 1024)}MB`,
    percentage: `${Math.round(memUsagePercent)}%`
  }

  // OK and WARNING return 200, ERROR returns 503
  const statusCode = health.status === 'ERROR' ? 503 : 200
  res.status(statusCode).json(health)
})

module.exports = router
Process Management
// server.js
const app = require('./app')
const { logger } = require('./utils/logger')
const { cache } = require('./utils/cache')
const connectDB = require('./config/database')

const PORT = process.env.PORT || 3000
let server

const startServer = async () => {
  try {
    // Connect to database
    await connectDB()

    // Connect to cache
    await cache.connect()

    // Start HTTP server
    server = app.listen(PORT, () => {
      logger.info(`Server running on port ${PORT} in ${process.env.NODE_ENV} mode`)
    })

    // Handle unhandled promise rejections
    process.on('unhandledRejection', (err) => {
      logger.error('Unhandled Promise Rejection:', err)
      closeServer(1)
    })

    // Handle uncaught exceptions
    process.on('uncaughtException', (err) => {
      logger.error('Uncaught Exception:', err)
      closeServer(1)
    })

    // Graceful shutdown
    process.on('SIGTERM', () => {
      logger.info('SIGTERM received, shutting down gracefully')
      closeServer(0)
    })

    process.on('SIGINT', () => {
      logger.info('SIGINT received, shutting down gracefully')
      closeServer(0)
    })
  } catch (error) {
    logger.error('Failed to start server:', error)
    process.exit(1)
  }
}

// Stop accepting new connections, then exit; non-zero code signals a crash
const closeServer = (code = 0) => {
  if (server) {
    server.close(() => {
      logger.info('Server closed')
      process.exit(code)
    })

    // Force close after 10 seconds
    setTimeout(() => {
      logger.error('Forced shutdown')
      process.exit(1)
    }, 10000)
  } else {
    process.exit(code)
  }
}

startServer()
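server.js requires `./app`, which the original post does not show. A minimal sketch of how that file might wire the earlier modules together; the exact ordering and body-size limits are assumptions:

// app.js (assumed wiring of the modules above)
const express = require('express')
const { setupSecurity } = require('./middleware/security')
const { httpLogger } = require('./utils/logger')
const healthRouter = require('./routes/health')

const app = express()

// Trust the first proxy (nginx) so req.ip and rate limiting see the real client IP
app.set('trust proxy', 1)

// Body parsing with a sane size limit
app.use(express.json({ limit: '10kb' }))
app.use(express.urlencoded({ extended: true, limit: '10kb' }))

// Security headers, CORS, sanitization and rate limiting
setupSecurity(app)

// Structured request logging
app.use(httpLogger)

// Health checks
app.use('/health', healthRouter)

// ... application routes go here, e.g. app.use('/api/posts', postsRouter)

module.exports = app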
Docker Configuration
# Dockerfile
FROM node:18-alpine AS builder

WORKDIR /app

# Copy package manifests
COPY package*.json ./

# Install production dependencies only
RUN npm ci --omit=dev && npm cache clean --force

# Production stage
FROM node:18-alpine AS production

# Create app user
RUN addgroup -g 1001 -S nodejs && adduser -S express -u 1001 -G nodejs

WORKDIR /app

# Copy dependencies from the builder stage
COPY --from=builder /app/node_modules ./node_modules
COPY . .

# Create logs directory
RUN mkdir -p /app/logs && chown express:nodejs /app/logs

# Switch to non-root user
USER express

# Expose port
EXPOSE 3000

# Health check
HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \
  CMD node healthcheck.js

# Start application
CMD ["node", "server.js"]
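The HEALTHCHECK instruction calls a `healthcheck.js` script that the original post does not include. A minimal sketch of such a script, assuming the /health endpoint defined earlier:

// healthcheck.js (assumed script referenced by the Dockerfile HEALTHCHECK)
const http = require('http')

const request = http.get(
  { host: 'localhost', port: process.env.PORT || 3000, path: '/health', timeout: 2000 },
  (res) => {
    // Exit 0 (healthy) only on a 2xx response
    process.exit(res.statusCode >= 200 && res.statusCode < 300 ? 0 : 1)
  }
)

request.on('error', () => process.exit(1))
request.on('timeout', () => {
  request.destroy()
  process.exit(1)
})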
# docker-compose.yml
version: '3.8'

services:
  app:
    build: .
    ports:
      - "3000:3000"
    environment:
      - NODE_ENV=production
      - MONGODB_URI=mongodb://mongo:27017/express_blog
      - REDIS_HOST=redis
      - REDIS_PORT=6379
    depends_on:
      - mongo
      - redis
    restart: unless-stopped
    volumes:
      - ./logs:/app/logs

  mongo:
    image: mongo:6
    ports:
      - "27017:27017"
    volumes:
      - mongo_data:/data/db
    restart: unless-stopped

  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    restart: unless-stopped

  nginx:
    image: nginx:alpine
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf
      - ./ssl:/etc/nginx/ssl
    depends_on:
      - app
    restart: unless-stopped

volumes:
  mongo_data:
Nginx Configuration
# nginx.conf
events {
  worker_connections 1024;
}

http {
  upstream app {
    server app:3000;
  }

  # Rate limiting
  limit_req_zone $binary_remote_addr zone=api:10m rate=10r/s;

  server {
    listen 80;
    server_name yourdomain.com www.yourdomain.com;

    # Redirect HTTP to HTTPS
    return 301 https://$server_name$request_uri;
  }

  server {
    listen 443 ssl http2;
    server_name yourdomain.com www.yourdomain.com;

    # SSL configuration
    ssl_certificate /etc/nginx/ssl/cert.pem;
    ssl_certificate_key /etc/nginx/ssl/key.pem;
    ssl_protocols TLSv1.2 TLSv1.3;
    ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384;

    # Security headers
    add_header X-Frame-Options DENY;
    add_header X-Content-Type-Options nosniff;
    add_header X-XSS-Protection "1; mode=block";
    add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";

    # Gzip compression
    gzip on;
    gzip_types text/plain text/css application/json application/javascript text/xml application/xml;

    # Rate limiting
    limit_req zone=api burst=20 nodelay;

    location / {
      proxy_pass http://app;
      proxy_set_header Host $host;
      proxy_set_header X-Real-IP $remote_addr;
      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
      proxy_set_header X-Forwarded-Proto $scheme;

      # Timeouts
      proxy_connect_timeout 60s;
      proxy_send_timeout 60s;
      proxy_read_timeout 60s;
    }

    # Health check endpoint
    location /health {
      access_log off;
      proxy_pass http://app;
    }
  }
}
Performance Monitoring
// middleware/monitoring.js
const prometheus = require('prom-client')

// Create metrics
const httpRequestDuration = new prometheus.Histogram({
  name: 'http_request_duration_seconds',
  help: 'Duration of HTTP requests in seconds',
  labelNames: ['method', 'route', 'status']
})

const httpRequestTotal = new prometheus.Counter({
  name: 'http_requests_total',
  help: 'Total number of HTTP requests',
  labelNames: ['method', 'route', 'status']
})

const activeConnections = new prometheus.Gauge({
  name: 'http_active_connections',
  help: 'Number of active HTTP connections'
})

// Register default metrics
prometheus.register.setDefaultLabels({
  app: 'express-api',
  version: process.env.npm_package_version || '1.0.0'
})
prometheus.collectDefaultMetrics()

const metricsMiddleware = (req, res, next) => {
  const start = Date.now()

  res.on('finish', () => {
    const duration = (Date.now() - start) / 1000
    const route = req.route?.path || req.path

    httpRequestDuration
      .labels(req.method, route, res.statusCode.toString())
      .observe(duration)

    httpRequestTotal
      .labels(req.method, route, res.statusCode.toString())
      .inc()
  })

  next()
}

// Metrics endpoint for Prometheus scraping
const metricsEndpoint = async (req, res) => {
  try {
    res.set('Content-Type', prometheus.register.contentType)
    res.end(await prometheus.register.metrics())
  } catch (error) {
    res.status(500).end(String(error))
  }
}

module.exports = { metricsMiddleware, metricsEndpoint, activeConnections }
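To expose these metrics, the middleware and endpoint need to be registered on the app. A small usage sketch, assuming the app.js wiring shown earlier (restricting public access to /metrics, for example at the nginx layer, is left out here):

// In app.js (illustrative wiring)
const { metricsMiddleware, metricsEndpoint } = require('./middleware/monitoring')

app.use(metricsMiddleware)            // record duration and count for every request
app.get('/metrics', metricsEndpoint)  // scraped by Prometheus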
Deployment Scripts
#!/bin/bash
# deploy.sh
set -e

echo "Starting deployment..."

# Build Docker image
docker build -t express-api:latest .

# Stop existing containers
docker-compose down

# Start new containers
docker-compose up -d

# Wait for the application to start
echo "Waiting for application to start..."
sleep 30

# Check health
if curl -f http://localhost:3000/health; then
  echo "Deployment successful!"
else
  echo "Deployment failed - restarting the stack..."
  # Note: a true rollback would re-deploy the previously tagged image,
  # which requires tagging each build instead of reusing :latest
  docker-compose down
  docker-compose up -d
  exit 1
fi
Best Practices
- Environment Configuration - keep all configuration in environment variables, never hard-coded
- Security - apply security headers, input sanitization, CORS whitelisting, and rate limiting
- Monitoring - set up metrics collection and alerting
- Logging - use structured logging with appropriate log levels
- Caching - cache expensive or frequently requested responses
- Health Checks - expose health check endpoints for load balancers and orchestrators
- Graceful Shutdown - handle SIGTERM/SIGINT and close connections cleanly
- Error Handling - centralize error handling and reporting (see the sketch after this list)
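The original post does not include the error-handling middleware itself. A minimal sketch of a centralized handler that logs through the winston logger above and hides stack traces in production; the response shape is an assumption:

// middleware/errorHandler.js (illustrative sketch)
const { logger } = require('../utils/logger')

// Must be registered after all routes: app.use(errorHandler)
const errorHandler = (err, req, res, next) => {
  const statusCode = err.statusCode || 500

  logger.error('Unhandled error', {
    message: err.message,
    stack: err.stack,
    method: req.method,
    url: req.originalUrl
  })

  res.status(statusCode).json({
    success: false,
    message: statusCode === 500 && process.env.NODE_ENV === 'production'
      ? 'Internal server error'
      : err.message
  })
}

module.exports = errorHandler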
Summary
Deploying Express.js applications to production requires preparation on several fronts, from security, performance, and monitoring through to deployment automation. Following these guidelines will help keep applications stable and performant in a production environment.