Resource Limit and Quota Bypass Vulnerabilities in Application Systems

High Risk · Business Logic
resource-limits · quota-bypass · rate-limiting · usage-restrictions · denial-of-service · resource-exhaustion · bandwidth-abuse

What it is

A high-severity vulnerability where applications fail to properly enforce resource limits, quotas, or usage restrictions, allowing attackers to bypass intended limitations on API calls, file uploads, storage usage, bandwidth consumption, or computational resources. These attacks can lead to denial of service, resource exhaustion, financial impact through quota overuse, and degraded system performance affecting legitimate users.

// VULNERABLE: Simple rate limiting with bypass opportunities
const rateLimit = require('express-rate-limit');

const basicRateLimit = rateLimit({
    windowMs: 15 * 60 * 1000,
    max: 100,
    message: 'Too many requests',
    keyGenerator: (req) => req.ip // Only IP-based
});

app.use('/api', basicRateLimit);

app.post('/api/upload', upload.single('file'), async (req, res) => {
    // PROBLEM: No quota validation before upload
    const file = req.file;

    if (file.size > 10 * 1024 * 1024) {
        return res.status(413).json({ error: 'File too large' });
    }

    // Process file without resource tracking
    const result = await processFile(file);

    res.json({ success: true, fileId: result.id });
});

// SECURE: Comprehensive resource management with monitoring and limits
const { body, validationResult } = require('express-validator');
const crypto = require('crypto');
const { promisify } = require('util');

// Initialize secure resource management
const resourceManager = new ResourceLimitManager();
const operationExecutor = new SecureOperationExecutor();
const fileUploadManager = new SecureFileUploadManager();

// Create adaptive rate limiting middleware
const createAdaptiveRateLimit = () => {
    return async (req, res, next) => {
        try {
            const userId = req.user?.id || null;
            const operationType = 'api_request';

            // Check comprehensive resource limits
            const limitCheck = await resourceManager.checkResourceLimits(
                userId,
                operationType,
                {
                    endpoint: req.path,
                    method: req.method,
                    userAgent: req.get('User-Agent'),
                    ip: req.ip
                }
            );

            if (!limitCheck.allowed) {
                return res.status(429).json({
                    error: 'Resource limit exceeded',
                    reason: limitCheck.reason,
                    limits: limitCheck.limits,
                    retryAfter: limitCheck.retryAfter || 60
                });
            }

            // Add resource tracking to request
            req.resourceLimits = limitCheck.limits;

            // Track API request
            await resourceManager.updateUserUsage(
                userId || 'anonymous',
                'api_requests',
                { increment: 1, endpoint: req.path }
            );

            next();

        } catch (error) {
            console.error('Rate limiting error:', error);
            // Allow request to proceed if rate limiter fails
            next();
        }
    };
};

app.use('/api', createAdaptiveRateLimit());

// Secure file upload endpoint with comprehensive resource management
app.post('/api/upload', [
    body('metadata').optional().isJSON().withMessage('Invalid metadata format'),
    body('options').optional().isJSON().withMessage('Invalid options format')
], async (req, res) => {
    let operationTracker = null;

    try {
        const errors = validationResult(req);
        if (!errors.isEmpty()) {
            return res.status(400).json({ errors: errors.array() });
        }

        const userId = req.user.id;
        const uploadOptions = {
            maxFileSize: 50 * 1024 * 1024, // 50MB
            allowedTypes: ['image/jpeg', 'image/png', 'application/pdf'],
            virusScanRequired: true,
            contentValidation: true
        };

        // Use secure multer configuration
        const secureUpload = fileUploadManager.createSecureMulterConfig();

        // Wrap multer in promise for better error handling
        const processUpload = promisify(secureUpload.single('file'));

        // Execute upload with resource management
        const uploadResult = await operationExecutor.executeWithLimits(
            'file_upload',
            async (tracker) => {
                operationTracker = tracker;

                // Process upload with monitoring
                await processUpload(req, res);

                if (!req.file) {
                    throw new Error('No file uploaded');
                }

                // Update progress
                tracker.updateProgress(0.3); // Upload completed

                // Process file with comprehensive security checks
                const fileResult = await fileUploadManager.processUploadedFile(
                    req.file,
                    userId,
                    uploadOptions
                );

                tracker.updateProgress(0.8); // Processing completed

                return fileResult;
            },
            userId,
            {
                fileSize: req.file?.size,
                contentType: req.file?.mimetype,
                operation: 'file_upload'
            }
        );

        // Log successful upload
        await SecurityLog.create({
            type: 'file_upload_success',
            userId,
            details: {
                fileId: uploadResult.result.fileId,
                filename: uploadResult.result.filename,
                size: uploadResult.result.size,
                contentType: uploadResult.result.contentType,
                operationStats: uploadResult.operationStats
            },
            timestamp: new Date()
        });

        res.json({
            success: true,
            fileId: uploadResult.result.fileId,
            filename: uploadResult.result.filename,
            size: uploadResult.result.size,
            contentType: uploadResult.result.contentType,
            uploadMetrics: {
                duration: uploadResult.operationStats.duration,
                memoryUsed: uploadResult.operationStats.memoryUsed,
                operationId: uploadResult.operationId
            },
            quotaInfo: {
                current: req.resourceLimits.user.find(l => l.type === 'upload_size')?.current,
                limit: req.resourceLimits.user.find(l => l.type === 'upload_size')?.limit,
                remaining: req.resourceLimits.user.find(l => l.type === 'upload_size')?.remaining
            }
        });

    } catch (error) {
        // Log upload failure
        await SecurityLog.create({
            type: 'file_upload_failure',
            userId: req.user?.id,
            details: {
                error: error.message,
                filename: req.file?.originalname,
                size: req.file?.size,
                contentType: req.file?.mimetype
            },
            timestamp: new Date()
        });

        console.error('File upload error:', {
            error: error.message,
            stack: error.stack,
            userId: req.user?.id,
            fileInfo: {
                originalname: req.file?.originalname,
                size: req.file?.size,
                mimetype: req.file?.mimetype
            },
            timestamp: new Date()
        });

        // Determine appropriate error response
        const statusCode = error.message.includes('quota') ? 413 :
                           error.message.includes('virus') ? 422 :
                           error.message.includes('content') ? 422 :
                           error.message.includes('timeout') ? 408 : 500;

        res.status(statusCode).json({
            error: error.message,
            timestamp: new Date().toISOString(),
            operationId: operationTracker?.operationId
        });
    }
});

💡 Why This Fix Works

The secure implementation establishes comprehensive resource monitoring that tracks CPU, memory, and I/O usage in real time and applies adaptive rate limiting that adjusts to system load and user behavior. File uploads go through atomic quota management and virus scanning, circuit breakers protect against cascading failures, every resource-usage event is audit-logged, and the system degrades gracefully as resource limits are approached.
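
The adaptive behavior mentioned above can be wired up in several ways; the following is a minimal sketch only, assuming the system usage snapshot exposed by the ResourceLimitManager shown under Fixes reports cpu.usage and memory.usage as fractions between 0 and 1. The helper name and scaling factors are illustrative, not part of the implementation above.

Illustrative sketch – JAVASCRIPT
// Scale a per-user request budget down as system load rises (example thresholds)
function adaptiveMaxRequests(baseMax, systemStats) {
    const load = Math.max(systemStats.cpu.usage, systemStats.memory.usage);

    if (load > 0.9) return Math.floor(baseMax * 0.25); // near saturation: shed aggressively
    if (load > 0.75) return Math.floor(baseMax * 0.5); // degraded: halve the budget
    return baseMax;                                    // normal operation
}

// Example: a 1000 requests/hour budget drops to 500 once CPU or memory passes 75%
const effectiveMax = adaptiveMaxRequests(1000, { cpu: { usage: 0.8 }, memory: { usage: 0.6 } });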

Why it happens

Applications become vulnerable when rate limiting can be evaded (for example, by rotating IP addresses or abusing trusted headers), when quotas are checked non-atomically or only after the resource has already been consumed, when client-supplied sizes and parameters are trusted, and when expensive operations such as bulk processing, complex queries, and unbounded recursion run without any limits at all.

Root causes

API Rate Limiting Bypass and Quota Circumvention

API endpoints that implement weak or bypassable rate limiting mechanisms allow attackers to exceed intended usage limits through various techniques including request distribution, header manipulation, or exploiting rate limit implementation flaws.

Preview example – JAVASCRIPT
// VULNERABLE: API rate limiting with multiple bypass opportunities
const express = require('express');
const rateLimit = require('express-rate-limit');
const app = express();

// PROBLEM 1: Simple rate limiting with easy bypasses
const basicRateLimit = rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: 100, // PROBLEM: Fixed limit without user context
    message: 'Too many requests',
    keyGenerator: (req) => {
        // VULNERABLE: Only using IP address
        return req.ip;
    },
    skip: (req) => {
        // PROBLEM: Admin bypass without proper validation
        return req.headers['x-admin-bypass'] === 'true';
    }
});

// VULNERABLE: Rate limiting applied inconsistently
app.use('/api/public', basicRateLimit);

// PROBLEM 2: No rate limiting on critical endpoints
app.post('/api/upload', async (req, res) => {
    try {
        // VULNERABLE: No upload rate limiting
        const file = req.file;
        
        // PROBLEM: File size check after upload
        if (file.size > 10 * 1024 * 1024) { // 10MB
            return res.status(413).json({ error: 'File too large' });
        }
        
        // PROBLEM: No user quota validation
        const result = await processFileUpload(file, req.user.id);
        
        res.json({ success: true, fileId: result.id });
        
    } catch (error) {
        res.status(500).json({ error: 'Upload failed' });
    }
});

// PROBLEM 3: Per-user quotas not enforced
class UserQuotaManager {
    constructor() {
        this.quotas = new Map(); // In-memory storage is problematic
    }
    
    // VULNERABLE: Quota checking without atomic operations
    async checkUserQuota(userId, resourceType, requestedAmount) {
        const userQuota = await this.getUserQuota(userId, resourceType);
        const currentUsage = await this.getCurrentUsage(userId, resourceType);
        
        // PROBLEM: Check-then-act race condition
        if (currentUsage + requestedAmount > userQuota.limit) {
            return {
                allowed: false,
                reason: 'Quota exceeded',
                current: currentUsage,
                limit: userQuota.limit
            };
        }
        
        // VULNERABLE: Usage update separate from check
        await this.incrementUsage(userId, resourceType, requestedAmount);
        
        return {
            allowed: true,
            remaining: userQuota.limit - currentUsage - requestedAmount
        };
    }
    
    // PROBLEM: Usage tracking without persistence
    async incrementUsage(userId, resourceType, amount) {
        const key = `${userId}:${resourceType}`;
        const current = this.quotas.get(key) || 0;
        this.quotas.set(key, current + amount);
        
        // PROBLEM: No database persistence
        return current + amount;
    }
}

const quotaManager = new UserQuotaManager();

// VULNERABLE: Bulk API operations without proper limiting
app.post('/api/bulk-process', async (req, res) => {
    const { operations } = req.body;
    
    // PROBLEM: No validation of bulk operation size
    if (!Array.isArray(operations)) {
        return res.status(400).json({ error: 'Invalid operations format' });
    }
    
    const results = [];
    
    // VULNERABLE: Processing unlimited operations
    for (const operation of operations) {
        try {
            // PROBLEM: No individual operation rate limiting
            const result = await processOperation(operation, req.user.id);
            results.push({ success: true, result });
        } catch (error) {
            results.push({ success: false, error: error.message });
        }
    }
    
    res.json({ results });
});

// PROBLEM 4: WebSocket connections without limits
const http = require('http');
const socketIo = require('socket.io');

const server = http.createServer(app);
const io = socketIo(server);

// VULNERABLE: Unlimited WebSocket connections
io.on('connection', (socket) => {
    console.log('User connected:', socket.id);
    
    // PROBLEM: No connection limits per user
    socket.on('subscribe', (data) => {
        // VULNERABLE: Unlimited subscriptions
        socket.join(data.channel);
    });
    
    socket.on('message', async (data) => {
        // PROBLEM: No message rate limiting
        try {
            await processMessage(data, socket.userId);
            io.to(data.channel).emit('message', data);
        } catch (error) {
            socket.emit('error', { message: error.message });
        }
    });
    
    // PROBLEM: No cleanup of user resources on disconnect
    socket.on('disconnect', () => {
        console.log('User disconnected:', socket.id);
    });
});

// VULNERABLE: Pagination without limits
app.get('/api/data', async (req, res) => {
    const {
        page = 1,
        limit = 10,  // PROBLEM: No maximum cap on client-supplied limit
        sortBy,
        filter
    } = req.query;
    
    try {
        // PROBLEM: No maximum limit validation
        const pageLimit = parseInt(limit);
        const pageNumber = parseInt(page);
        
        // VULNERABLE: Can request massive page sizes
        const results = await Database.find(filter)
            .sort(sortBy)
            .skip((pageNumber - 1) * pageLimit)
            .limit(pageLimit);
        
        res.json({
            data: results,
            page: pageNumber,
            limit: pageLimit,
            total: await Database.countDocuments(filter)
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Data retrieval failed' });
    }
});

// Attack scenarios:
// 1. Use different IP addresses to bypass rate limits
// 2. Manipulate headers to trigger admin bypass
// 3. Submit massive bulk operations
// 4. Request huge page sizes to exhaust memory
// 5. Open unlimited WebSocket connections
// 6. Race conditions in quota checking
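
For contrast, a minimal hardening sketch is shown below. It assumes express-rate-limit v6-style options and authentication middleware that populates req.user before the limiter runs; the key prefixes are illustrative. The key combines account and IP context so rotating IPs no longer resets the budget, and there is no header-controlled skip path to abuse.

Illustrative sketch – JAVASCRIPT
const hardenedRateLimit = rateLimit({
    windowMs: 15 * 60 * 1000,
    max: 100,
    standardHeaders: true,   // advertise limits via RateLimit-* headers
    legacyHeaders: false,
    keyGenerator: (req) => {
        // Authenticated traffic is limited per account; anonymous traffic falls back to IP
        return req.user ? `user:${req.user.id}` : `ip:${req.ip}`;
    }
});

// Applied to every API route, including uploads and bulk operations
app.use('/api', hardenedRateLimit);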

File Upload and Storage Quota Bypass Vulnerabilities

File upload systems that fail to properly validate file sizes, enforce storage quotas, or implement upload limits allow attackers to consume excessive storage resources, bypass upload restrictions, or cause denial of service through resource exhaustion.

Preview example – JAVASCRIPT
// VULNERABLE: File upload system with quota bypass vulnerabilities
const express = require('express');
const multer = require('multer');
const path = require('path');
const fs = require('fs');
const app = express();

// PROBLEM 1: Multer configuration with insufficient limits
const upload = multer({
    dest: 'uploads/',
    limits: {
        fileSize: 100 * 1024 * 1024, // PROBLEM: 100MB per file is too high
        files: 10                    // PROBLEM: No validation of total size
    },
    fileFilter: (req, file, cb) => {
        // VULNERABLE: Weak file type validation
        const allowedTypes = ['.jpg', '.png', '.pdf', '.doc'];
        const fileExt = path.extname(file.originalname).toLowerCase();
        
        // PROBLEM: Only checking extension, not content
        if (allowedTypes.includes(fileExt)) {
            cb(null, true);
        } else {
            cb(new Error('File type not allowed'));
        }
    }
});

// VULNERABLE: File upload without proper quota validation
app.post('/api/upload/single', upload.single('file'), async (req, res) => {
    try {
        if (!req.file) {
            return res.status(400).json({ error: 'No file uploaded' });
        }
        
        const userId = req.user.id;
        const file = req.file;
        
        // PROBLEM: Quota check after file is already uploaded
        const userStorage = await getUserStorageUsage(userId);
        const storageLimit = await getUserStorageLimit(userId);
        
        // VULNERABLE: Check happens after storage consumption
        if (userStorage + file.size > storageLimit) {
            // PROBLEM: File already stored on disk
            fs.unlinkSync(file.path); // Cleanup attempt
            
            return res.status(413).json({
                error: 'Storage quota exceeded',
                used: userStorage,
                limit: storageLimit,
                fileSize: file.size
            });
        }
        
        // PROBLEM: No atomic storage tracking
        await updateUserStorageUsage(userId, file.size);
        
        const savedFile = await FileMetadata.create({
            userId,
            filename: file.originalname,
            path: file.path,
            size: file.size,
            mimetype: file.mimetype,
            uploadedAt: new Date()
        });
        
        res.json({
            success: true,
            fileId: savedFile.id,
            filename: savedFile.filename,
            size: savedFile.size
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Upload failed' });
    }
});

// VULNERABLE: Multiple file upload without aggregate limits
app.post('/api/upload/multiple', upload.array('files', 50), async (req, res) => {
    try {
        if (!req.files || req.files.length === 0) {
            return res.status(400).json({ error: 'No files uploaded' });
        }
        
        const userId = req.user.id;
        const files = req.files;
        
        // PROBLEM: Total size calculation after all files uploaded
        const totalSize = files.reduce((sum, file) => sum + file.size, 0);
        const userStorage = await getUserStorageUsage(userId);
        const storageLimit = await getUserStorageLimit(userId);
        
        // VULNERABLE: Check after consumption
        if (userStorage + totalSize > storageLimit) {
            // PROBLEM: Cleanup all files individually
            files.forEach(file => {
                try {
                    fs.unlinkSync(file.path);
                } catch (cleanupError) {
                    console.error('Cleanup failed:', cleanupError);
                }
            });
            
            return res.status(413).json({
                error: 'Storage quota exceeded',
                totalSize,
                available: storageLimit - userStorage
            });
        }
        
        const savedFiles = [];
        
        // PROBLEM: Non-atomic file processing
        for (const file of files) {
            try {
                const savedFile = await FileMetadata.create({
                    userId,
                    filename: file.originalname,
                    path: file.path,
                    size: file.size,
                    mimetype: file.mimetype,
                    uploadedAt: new Date()
                });
                
                savedFiles.push(savedFile);
                
                // PROBLEM: Individual storage updates (race conditions)
                await updateUserStorageUsage(userId, file.size);
                
            } catch (fileError) {
                console.error('File processing error:', fileError);
            }
        }
        
        res.json({
            success: true,
            uploadedFiles: savedFiles.length,
            totalSize,
            files: savedFiles.map(f => ({
                id: f.id,
                filename: f.filename,
                size: f.size
            }))
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Multiple upload failed' });
    }
});

// PROBLEM 2: Chunked upload without proper validation
const activeUploads = new Map(); // In-memory tracking is problematic

app.post('/api/upload/chunk', async (req, res) => {
    const {
        uploadId,
        chunkIndex,
        totalChunks,
        totalSize    // DANGEROUS: Client-provided total size
    } = req.body;
    
    try {
        const chunk = req.file;
        
        if (!chunk) {
            return res.status(400).json({ error: 'No chunk data' });
        }
        
        // PROBLEM: Trusting client-provided total size
        if (!activeUploads.has(uploadId)) {
            // VULNERABLE: No quota check at upload start
            activeUploads.set(uploadId, {
                userId: req.user.id,
                totalSize,          // Using client value
                totalChunks,
                receivedChunks: new Set(),
                uploadedSize: 0,
                startTime: Date.now()
            });
        }
        
        const upload = activeUploads.get(uploadId);
        
        // PROBLEM: No verification of actual vs declared size
        upload.receivedChunks.add(chunkIndex);
        upload.uploadedSize += chunk.size;
        
        // VULNERABLE: Size can exceed declared total
        if (upload.uploadedSize > upload.totalSize * 1.1) { // 10% tolerance
            activeUploads.delete(uploadId);
            return res.status(413).json({ error: 'Upload size mismatch' });
        }
        
        // Store chunk
        const chunkPath = `uploads/${uploadId}_chunk_${chunkIndex}`;
        fs.renameSync(chunk.path, chunkPath);
        
        // Check if upload complete
        if (upload.receivedChunks.size === upload.totalChunks) {
            // PROBLEM: Final assembly without size verification
            await assembleChunkedFile(uploadId, upload);
            activeUploads.delete(uploadId);
            
            res.json({ success: true, status: 'completed' });
        } else {
            res.json({ 
                success: true, 
                status: 'chunk_received',
                progress: upload.receivedChunks.size / upload.totalChunks
            });
        }
        
    } catch (error) {
        res.status(500).json({ error: 'Chunk upload failed' });
    }
});

// VULNERABLE: File streaming without bandwidth limits
app.get('/api/files/:fileId/download', async (req, res) => {
    try {
        const fileId = req.params.fileId;
        const file = await FileMetadata.findById(fileId);
        
        if (!file) {
            return res.status(404).json({ error: 'File not found' });
        }
        
        // PROBLEM: No download rate limiting
        // PROBLEM: No bandwidth quota enforcement
        
        res.setHeader('Content-Disposition', `attachment; filename="${file.filename}"`);
        res.setHeader('Content-Type', file.mimetype);
        res.setHeader('Content-Length', file.size);
        
        // VULNERABLE: Unlimited bandwidth consumption
        const fileStream = fs.createReadStream(file.path);
        fileStream.pipe(res);
        
        // PROBLEM: No tracking of download bandwidth usage
        
    } catch (error) {
        res.status(500).json({ error: 'Download failed' });
    }
});

// PROBLEM 3: Image processing without resource limits
app.post('/api/images/process', upload.single('image'), async (req, res) => {
    const {
        operations,    // DANGEROUS: Client-defined operations
        format,
        quality
    } = req.body;
    
    try {
        const image = req.file;
        
        if (!image) {
            return res.status(400).json({ error: 'No image uploaded' });
        }
        
        // PROBLEM: No limits on processing operations
        const processedImage = await processImage(image.path, {
            operations: JSON.parse(operations), // VULNERABLE: Complex operations
            format,
            quality: parseInt(quality)
        });
        
        // PROBLEM: Processed image size not counted against quota
        res.json({
            success: true,
            processedImageUrl: processedImage.url,
            originalSize: image.size,
            processedSize: processedImage.size
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Image processing failed' });
    }
});

// Attack scenarios:
// 1. Upload files exceeding quota using chunked uploads
// 2. Submit files with incorrect size declarations
// 3. Exploit race conditions in storage tracking
// 4. Use multiple uploads to bypass aggregate limits
// 5. Process large images with complex operations
// 6. Download files repeatedly to exhaust bandwidth
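
The check-then-act race above disappears if quota is reserved atomically before any bytes are accepted. The sketch below is one possible approach, assuming a Redis client whose commands return promises (like the redisClient created under Fixes) and a caller that already knows the user's storage limit; the key name and helper names are illustrative.

Illustrative sketch – JAVASCRIPT
// Reserve quota with a single atomic increment before the upload stream is accepted,
// so two concurrent uploads cannot both pass a stale check
async function reserveStorage(userId, bytes, limitBytes) {
    const key = `storage_used:${userId}`;
    const newTotal = await redisClient.incrby(key, bytes); // atomic increment
    if (newTotal > limitBytes) {
        await redisClient.decrby(key, bytes); // roll back the reservation
        return false;
    }
    return true;
}

async function releaseStorage(userId, bytes) {
    await redisClient.decrby(`storage_used:${userId}`, bytes);
}

// Usage: reserve against the declared size before accepting the stream, verify the
// actual bytes received, and call releaseStorage() if validation or persistence fails.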

Database Query and Computational Resource Abuse

Applications that allow unrestricted database queries, complex computations, or resource-intensive operations without proper limits enable attackers to consume excessive CPU, memory, or database resources, leading to performance degradation and denial of service.

Preview example – JAVASCRIPT
// VULNERABLE: Database query system with resource abuse vulnerabilities
const express = require('express');
const mongoose = require('mongoose');
const app = express();

// PROBLEM 1: Unrestricted database queries
app.get('/api/search', async (req, res) => {
    const {
        query,
        fields,      // DANGEROUS: Client-controlled field selection
        sort,        // DANGEROUS: Client-controlled sorting
        populate,    // DANGEROUS: Client-controlled population
        limit = 10
    } = req.query;
    
    try {
        // VULNERABLE: No query complexity validation
        const searchQuery = JSON.parse(query || '{}');
        const sortOptions = JSON.parse(sort || '{}');
        const populateOptions = JSON.parse(populate || '[]');
        
        // PROBLEM: No limit on query complexity
        let dbQuery = Product.find(searchQuery);
        
        // VULNERABLE: Client-controlled field selection
        if (fields) {
            dbQuery = dbQuery.select(fields);
        }
        
        // PROBLEM: No sorting restrictions
        if (Object.keys(sortOptions).length > 0) {
            dbQuery = dbQuery.sort(sortOptions);
        }
        
        // VULNERABLE: Unlimited population depth
        if (populateOptions.length > 0) {
            for (const populateOption of populateOptions) {
                dbQuery = dbQuery.populate(populateOption);
            }
        }
        
        // PROBLEM: No maximum limit enforcement
        const limitValue = Math.min(parseInt(limit), 1000); // Still too high
        dbQuery = dbQuery.limit(limitValue);
        
        const results = await dbQuery.exec();
        
        res.json({
            results,
            count: results.length,
            limit: limitValue
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Search failed' });
    }
});

// VULNERABLE: Aggregation pipeline without restrictions
app.post('/api/analytics/aggregate', async (req, res) => {
    const { pipeline, collection } = req.body;
    
    try {
        // PROBLEM: Client-provided aggregation pipeline
        if (!Array.isArray(pipeline)) {
            return res.status(400).json({ error: 'Invalid pipeline format' });
        }
        
        // VULNERABLE: No pipeline complexity validation
        const db = mongoose.connection.db;
        const targetCollection = db.collection(collection);
        
        // PROBLEM: Unrestricted aggregation operations
        const results = await targetCollection.aggregate(pipeline).toArray();
        
        res.json({
            success: true,
            results,
            pipeline
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Aggregation failed' });
    }
});

// PROBLEM 2: Computational resource abuse
class ComputationService {
    // VULNERABLE: No limits on computational complexity
    async performCalculation(operation, parameters) {
        switch (operation) {
            case 'fibonacci':
                // PROBLEM: No limit on input size
                return this.fibonacci(parameters.n);
                
            case 'prime_factorization':
                // PROBLEM: No timeout for long operations
                return this.primeFactorization(parameters.number);
                
            case 'matrix_multiplication':
                // PROBLEM: No matrix size limits
                return this.multiplyMatrices(parameters.matrixA, parameters.matrixB);
                
            case 'regex_test':
                // VULNERABLE: ReDoS potential
                return this.testRegex(parameters.pattern, parameters.text);
                
            default:
                throw new Error('Unknown operation');
        }
    }
    
    // PROBLEM: Recursive Fibonacci without memoization or limits
    fibonacci(n) {
        if (n <= 1) return n;
        return this.fibonacci(n - 1) + this.fibonacci(n - 2);
    }
    
    // PROBLEM: Inefficient prime factorization
    primeFactorization(number) {
        const factors = [];
        
        // VULNERABLE: No timeout or complexity limits
        for (let i = 2; i <= Math.sqrt(number); i++) {
            while (number % i === 0) {
                factors.push(i);
                number /= i;
            }
        }
        
        if (number > 1) {
            factors.push(number);
        }
        
        return factors;
    }
    
    // PROBLEM: Matrix multiplication without size limits
    multiplyMatrices(matrixA, matrixB) {
        const result = [];
        
        // VULNERABLE: No validation of matrix dimensions
        for (let i = 0; i < matrixA.length; i++) {
            result[i] = [];
            for (let j = 0; j < matrixB[0].length; j++) {
                let sum = 0;
                for (let k = 0; k < matrixB.length; k++) {
                    sum += matrixA[i][k] * matrixB[k][j];
                }
                result[i][j] = sum;
            }
        }
        
        return result;
    }
    
    // VULNERABLE: ReDoS potential
    testRegex(pattern, text) {
        try {
            const regex = new RegExp(pattern);
            return regex.test(text);
        } catch (error) {
            throw new Error('Invalid regex pattern');
        }
    }
}

const computationService = new ComputationService();

// VULNERABLE: Computation endpoint without limits
app.post('/api/compute', async (req, res) => {
    const { operation, parameters } = req.body;
    
    try {
        // PROBLEM: No timeout or resource limits
        const result = await computationService.performCalculation(operation, parameters);
        
        res.json({
            success: true,
            operation,
            result
        });
        
    } catch (error) {
        res.status(500).json({ error: error.message });
    }
});

// PROBLEM 3: Unrestricted data export
app.post('/api/export', async (req, res) => {
    const {
        format,
        query,
        fields,
        limit = 1000000  // PROBLEM: Default limit too high
    } = req.body;
    
    try {
        // VULNERABLE: No export size restrictions
        const exportQuery = JSON.parse(query || '{}');
        const fieldSelection = fields || {};
        
        // PROBLEM: Can export entire database
        const data = await DataModel.find(exportQuery)
            .select(fieldSelection)
            .limit(parseInt(limit))
            .lean(); // PROBLEM: Memory consumption for large datasets
        
        let exportData;
        
        switch (format) {
            case 'json':
                exportData = JSON.stringify(data);
                break;
            case 'csv':
                exportData = convertToCSV(data); // PROBLEM: Memory-intensive conversion
                break;
            case 'xml':
                exportData = convertToXML(data); // PROBLEM: CPU-intensive conversion
                break;
            default:
                throw new Error('Unsupported format');
        }
        
        res.setHeader('Content-Type', 'application/octet-stream');
        res.setHeader('Content-Disposition', `attachment; filename="export.${format}"`);
        
        // PROBLEM: Sending large response without streaming
        res.send(exportData);
        
    } catch (error) {
        res.status(500).json({ error: 'Export failed' });
    }
});

// VULNERABLE: Report generation without resource limits
app.post('/api/reports/generate', async (req, res) => {
    const {
        reportType,
        parameters,
        dateRange,
        groupBy,        // DANGEROUS: Client-controlled grouping
        aggregations   // DANGEROUS: Client-controlled aggregations
    } = req.body;
    
    try {
        // PROBLEM: Complex report generation without limits
        const reportData = await generateComplexReport({
            type: reportType,
            parameters,
            dateRange,
            groupBy,
            aggregations
        });
        
        res.json({
            success: true,
            report: reportData,
            generatedAt: new Date()
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Report generation failed' });
    }
});

// PROBLEM: Complex report generation function
async function generateComplexReport(options) {
    const {
        type,
        parameters,
        dateRange,
        groupBy,
        aggregations
    } = options;
    
    // VULNERABLE: No validation of operation complexity
    const pipeline = [];
    
    // Date range filter
    if (dateRange) {
        pipeline.push({
            $match: {
                createdAt: {
                    $gte: new Date(dateRange.start),
                    $lte: new Date(dateRange.end)
                }
            }
        });
    }
    
    // PROBLEM: Client-controlled grouping stages
    if (groupBy && Array.isArray(groupBy)) {
        const groupStage = { $group: { _id: {} } };
        
        for (const field of groupBy) {
            groupStage.$group._id[field] = `$${field}`;
        }
        
        // VULNERABLE: Client-controlled aggregation operations
        if (aggregations && Array.isArray(aggregations)) {
            for (const agg of aggregations) {
                groupStage.$group[agg.name] = {
                    [`$${agg.operation}`]: `$${agg.field}`
                };
            }
        }
        
        pipeline.push(groupStage);
    }
    
    // PROBLEM: No pipeline size or complexity limits
    const result = await DataModel.aggregate(pipeline);
    
    return result;
}

// Attack scenarios:
// 1. Submit complex aggregation pipelines to exhaust CPU
// 2. Request large data exports to consume memory
// 3. Use recursive operations with large inputs
// 4. Exploit ReDoS vulnerabilities with malicious regex
// 5. Generate reports with complex grouping and aggregations
// 6. Query with deep population to exhaust database connections
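
A compact counter-pattern for the search endpoint above is to let the server, not the client, decide how much can be returned and which fields can drive sorting, and to never accept raw JSON filters or aggregation pipelines. The sketch below assumes the Product model from the example; the sortable field names and caps are illustrative.

Illustrative sketch – JAVASCRIPT
const MAX_PAGE_SIZE = 100;
const SORTABLE_FIELDS = new Set(['createdAt', 'price', 'name']);

function sanitizeListParams(query) {
    // Clamp the page size and page number to safe server-defined bounds
    const limit = Math.min(Math.max(parseInt(query.limit, 10) || 10, 1), MAX_PAGE_SIZE);
    const page = Math.max(parseInt(query.page, 10) || 1, 1);

    // Only whitelisted fields may be sorted on
    const sortBy = SORTABLE_FIELDS.has(query.sortBy) ? query.sortBy : 'createdAt';
    const sortDir = query.sortDir === 'asc' ? 1 : -1;

    return { limit, skip: (page - 1) * limit, sort: { [sortBy]: sortDir } };
}

// Usage:
// const { limit, skip, sort } = sanitizeListParams(req.query);
// const results = await Product.find(allowedFilter).sort(sort).skip(skip).limit(limit);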

Memory and CPU Resource Exhaustion Attacks

Applications that process user input without proper resource management, memory limits, or CPU usage controls can be exploited to consume excessive system resources, leading to application crashes, slowdowns, or denial of service for other users.

Preview example – JAVASCRIPT
// VULNERABLE: Memory and CPU resource exhaustion vulnerabilities
const express = require('express');
const app = express();

// PROBLEM 1: Memory exhaustion through large data structures
app.post('/api/process-data', async (req, res) => {
    const { dataset, operations } = req.body;
    
    try {
        // VULNERABLE: No validation of dataset size
        if (!Array.isArray(dataset)) {
            return res.status(400).json({ error: 'Dataset must be an array' });
        }
        
        // PROBLEM: Loading entire dataset into memory
        const dataProcessor = new DataProcessor();
        const processedData = [];
        
        // VULNERABLE: No memory usage monitoring
        for (const item of dataset) {
            // PROBLEM: Accumulating processed results in memory
            const processed = await dataProcessor.processItem(item, operations);
            processedData.push(processed);
            
            // VULNERABLE: No break conditions for large datasets
        }
        
        // PROBLEM: Returning large response
        res.json({
            success: true,
            processedData,
            count: processedData.length
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Data processing failed' });
    }
});

// PROBLEM 2: CPU exhaustion through complex operations
class DataProcessor {
    async processItem(item, operations) {
        let result = { ...item };
        
        // VULNERABLE: No limits on operation complexity
        for (const operation of operations) {
            switch (operation.type) {
                case 'transform':
                    result = await this.transformData(result, operation.config);
                    break;
                case 'validate':
                    result = await this.validateData(result, operation.rules);
                    break;
                case 'enrich':
                    result = await this.enrichData(result, operation.sources);
                    break;
                case 'calculate':
                    result = await this.calculateMetrics(result, operation.formulas);
                    break;
            }
        }
        
        return result;
    }
    
    // VULNERABLE: Complex transformation without limits
    async transformData(data, config) {
        const transformed = {};
        
        // PROBLEM: No limit on transformation complexity
        for (const [key, transformation] of Object.entries(config)) {
            if (transformation.type === 'nested_loop') {
                // VULNERABLE: Nested loops with client-controlled depth
                transformed[key] = this.performNestedOperation(
                    data[transformation.field],
                    transformation.depth || 3
                );
            } else if (transformation.type === 'recursive') {
                // VULNERABLE: Recursive operations without depth limits
                transformed[key] = this.performRecursiveOperation(
                    data[transformation.field],
                    transformation.pattern
                );
            }
        }
        
        return { ...data, ...transformed };
    }
    
    // PROBLEM: Nested loops with exponential complexity
    performNestedOperation(input, depth) {
        if (depth <= 0 || !Array.isArray(input)) {
            return input;
        }
        
        const result = [];
        
        // VULNERABLE: Exponential time complexity
        for (let i = 0; i < input.length; i++) {
            for (let j = 0; j < input.length; j++) {
                const nested = this.performNestedOperation(
                    [input[i], input[j]], 
                    depth - 1
                );
                result.push(nested);
            }
        }
        
        return result;
    }
    
    // VULNERABLE: Unbounded recursion
    performRecursiveOperation(input, pattern) {
        if (!input || typeof input !== 'object') {
            return input;
        }
        
        const result = {};
        
        // PROBLEM: No recursion depth limit
        for (const [key, value] of Object.entries(input)) {
            if (pattern.recursive && typeof value === 'object') {
                result[key] = this.performRecursiveOperation(value, pattern);
            } else {
                result[key] = this.applyPattern(value, pattern);
            }
        }
        
        return result;
    }
}

// VULNERABLE: String processing with exponential complexity
app.post('/api/text/process', async (req, res) => {
    const { text, operations } = req.body;
    
    try {
        let processedText = text;
        
        // PROBLEM: No validation of text size
        if (typeof processedText !== 'string') {
            return res.status(400).json({ error: 'Text must be a string' });
        }
        
        // VULNERABLE: Processing operations without limits
        for (const operation of operations) {
            switch (operation.type) {
                case 'replace_all':
                    // PROBLEM: Multiple replace operations can be expensive
                    processedText = performReplaceAll(
                        processedText, 
                        operation.patterns
                    );
                    break;
                    
                case 'regex_match':
                    // VULNERABLE: ReDoS potential
                    processedText = performRegexOperations(
                        processedText,
                        operation.regexes
                    );
                    break;
                    
                case 'substring_search':
                    // PROBLEM: Inefficient string searching
                    processedText = performSubstringSearch(
                        processedText,
                        operation.patterns
                    );
                    break;
            }
        }
        
        res.json({
            success: true,
            originalLength: text.length,
            processedLength: processedText.length,
            processedText
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Text processing failed' });
    }
});

// PROBLEM: Inefficient string operations
function performReplaceAll(text, patterns) {
    let result = text;
    
    // VULNERABLE: Multiple nested replace operations
    for (const pattern of patterns) {
        // PROBLEM: String replacement in loop creates new strings
        while (result.includes(pattern.find)) {
            result = result.replace(pattern.find, pattern.replace);
        }
    }
    
    return result;
}

// VULNERABLE: ReDoS potential
function performRegexOperations(text, regexes) {
    let result = text;
    
    for (const regexConfig of regexes) {
        try {
            // PROBLEM: Client-provided regex patterns
            const regex = new RegExp(regexConfig.pattern, regexConfig.flags);
            
            // VULNERABLE: No timeout for regex operations
            if (regexConfig.operation === 'match') {
                const matches = text.match(regex);
                result += `\nMatches: ${matches ? matches.length : 0}`;
            } else if (regexConfig.operation === 'replace') {
                result = result.replace(regex, regexConfig.replacement);
            }
        } catch (error) {
            console.error('Regex operation failed:', error);
        }
    }
    
    return result;
}

// PROBLEM: Memory exhaustion through large arrays
app.post('/api/array/operations', async (req, res) => {
    const { arrays, operations } = req.body;
    
    try {
        const results = [];
        
        // VULNERABLE: No limit on array size or count
        for (const array of arrays) {
            let processedArray = [...array]; // PROBLEM: Creating copy of large array
            
            for (const operation of operations) {
                switch (operation.type) {
                    case 'sort':
                        // PROBLEM: Sorting large arrays is memory intensive
                        processedArray = processedArray.sort(operation.compareFn);
                        break;
                        
                    case 'filter':
                        // PROBLEM: Filter creates new array
                        processedArray = processedArray.filter(
                            createFilterFunction(operation.criteria)
                        );
                        break;
                        
                    case 'transform':
                        // VULNERABLE: Map operations on large arrays
                        processedArray = processedArray.map(
                            createTransformFunction(operation.transformation)
                        );
                        break;
                        
                    case 'cartesian_product':
                        // PROBLEM: Cartesian product has exponential space complexity
                        processedArray = calculateCartesianProduct(
                            processedArray,
                            operation.otherArray
                        );
                        break;
                }
            }
            
            results.push(processedArray);
        }
        
        // PROBLEM: Returning large result set
        res.json({
            success: true,
            results,
            totalItems: results.reduce((sum, arr) => sum + arr.length, 0)
        });
        
    } catch (error) {
        res.status(500).json({ error: 'Array operations failed' });
    }
});

// VULNERABLE: Exponential space complexity
function calculateCartesianProduct(array1, array2) {
    const result = [];
    
    // PROBLEM: No limit on result size
    for (const item1 of array1) {
        for (const item2 of array2) {
            result.push([item1, item2]);
        }
    }
    
    return result;
}

// Attack scenarios:
// 1. Submit large datasets to exhaust memory
// 2. Use complex nested operations to consume CPU
// 3. Exploit ReDoS with malicious regex patterns
// 4. Request cartesian products of large arrays
// 5. Chain multiple expensive operations
// 6. Use recursive operations with deep nesting
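
Up-front guardrails go a long way against the scenarios above: rejecting oversized input and capping recursion depth before processing starts is far cheaper than recovering after memory or CPU is already exhausted. The sketch below is illustrative only; the limit values and helper names are assumptions, not part of the vulnerable code above.

Illustrative sketch – JAVASCRIPT
const PROCESSING_LIMITS = {
    maxItems: 1000,        // dataset items per request
    maxOperations: 20,     // processing steps per request
    maxDepth: 5,           // recursion / nesting depth
    maxTextLength: 100000  // characters accepted for text processing
};

function assertWithinLimits(dataset, operations) {
    if (!Array.isArray(dataset) || dataset.length > PROCESSING_LIMITS.maxItems) {
        throw new Error(`Dataset must be an array of at most ${PROCESSING_LIMITS.maxItems} items`);
    }
    if (!Array.isArray(operations) || operations.length > PROCESSING_LIMITS.maxOperations) {
        throw new Error(`At most ${PROCESSING_LIMITS.maxOperations} operations are allowed`);
    }
}

function boundedRecurse(value, depth = 0) {
    if (depth >= PROCESSING_LIMITS.maxDepth || value === null || typeof value !== 'object') {
        return value; // stop at the depth limit instead of recursing indefinitely
    }
    const out = Array.isArray(value) ? [] : {};
    for (const [key, child] of Object.entries(value)) {
        out[key] = boundedRecurse(child, depth + 1);
    }
    return out;
}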

Fixes

1

Implement Comprehensive Resource Monitoring and Limits

Create a robust resource management system that monitors CPU, memory, and I/O usage in real-time, enforces configurable limits per user and operation type, and provides graceful degradation when limits are approached.

View implementation – JAVASCRIPT
// SECURE: Comprehensive resource monitoring and limit enforcement
const os = require('os');
const { performance } = require('perf_hooks');
const redis = require('redis');
const { promisify } = require('util');

const redisClient = redis.createClient(process.env.REDIS_URL);

class ResourceLimitManager {
    constructor() {
        this.limits = {
            // Per-user limits (per hour)
            user: {
                maxAPIRequests: 1000,
                maxUploadSize: 100 * 1024 * 1024, // 100MB
                maxDownloadBandwidth: 1024 * 1024 * 1024, // 1GB
                maxCPUTime: 60000, // 60 seconds
                maxMemoryUsage: 256 * 1024 * 1024, // 256MB
                maxDatabaseQueries: 500,
                maxStorageSpace: 5 * 1024 * 1024 * 1024 // 5GB
            },
            // Per-operation limits
            operation: {
                maxExecutionTime: 30000, // 30 seconds
                maxMemoryPerOperation: 128 * 1024 * 1024, // 128MB
                maxCPUPerOperation: 5000, // 5 seconds
                maxDatabaseConnections: 10,
                maxFileSize: 50 * 1024 * 1024, // 50MB
                maxQueryComplexity: 100
            },
            // Global system limits
            system: {
                maxConcurrentOperations: 1000,
                maxMemoryUsage: 0.8, // 80% of available memory
                maxCPUUsage: 0.9, // 90% of available CPU
                maxOpenFiles: 10000,
                emergencyThreshold: 0.95
            }
        };
        
        this.activeOperations = new Map();
        this.resourceUsage = new Map();
        
        // Start monitoring
        this.startResourceMonitoring();
    }
    
    // Monitor system resources continuously
    startResourceMonitoring() {
        setInterval(async () => {
            try {
                const systemStats = await this.getSystemResourceUsage();
                await this.updateSystemMetrics(systemStats);
                
                // Check for emergency conditions
                if (this.isEmergencyCondition(systemStats)) {
                    await this.handleEmergencyCondition(systemStats);
                }
                
            } catch (error) {
                console.error('Resource monitoring error:', error);
            }
        }, 5000); // Monitor every 5 seconds
    }
    
    // Get current system resource usage
    async getSystemResourceUsage() {
        const cpuUsage = process.cpuUsage();
        const memoryUsage = process.memoryUsage();
        const systemMemory = {
            total: os.totalmem(),
            free: os.freemem()
        };
        
        return {
            cpu: {
                user: cpuUsage.user / 1000000, // Convert to seconds
                system: cpuUsage.system / 1000000,
                usage: await this.getCPUUsagePercentage()
            },
            memory: {
                rss: memoryUsage.rss,
                heapTotal: memoryUsage.heapTotal,
                heapUsed: memoryUsage.heapUsed,
                external: memoryUsage.external,
                systemTotal: systemMemory.total,
                systemFree: systemMemory.free,
                usage: (systemMemory.total - systemMemory.free) / systemMemory.total
            },
            activeOperations: this.activeOperations.size,
            timestamp: Date.now()
        };
    }
    
    // Check resource limits before operation execution
    async checkResourceLimits(userId, operationType, operationData = {}) {
        try {
            // Check user-specific limits
            const userLimitCheck = await this.checkUserLimits(userId, operationType, operationData);
            if (!userLimitCheck.allowed) {
                return userLimitCheck;
            }
            
            // Check operation-specific limits
            const operationLimitCheck = await this.checkOperationLimits(operationType, operationData);
            if (!operationLimitCheck.allowed) {
                return operationLimitCheck;
            }
            
            // Check system-wide limits
            const systemLimitCheck = await this.checkSystemLimits();
            if (!systemLimitCheck.allowed) {
                return systemLimitCheck;
            }
            
            return {
                allowed: true,
                limits: {
                    user: userLimitCheck.limits,
                    operation: operationLimitCheck.limits,
                    system: systemLimitCheck.limits
                }
            };
            
        } catch (error) {
            console.error('Resource limit check error:', error);
            return {
                allowed: false,
                reason: 'resource_check_failed',
                error: error.message
            };
        }
    }
    
    // Check user-specific resource limits
    async checkUserLimits(userId, operationType, operationData) {
        const hour = Math.floor(Date.now() / (60 * 60 * 1000));
        const userKey = `user_limits:${userId}:${hour}`;
        
        // Get current user usage
        const currentUsage = await this.getUserCurrentUsage(userId, hour);
        
        // Check specific limits based on operation type
        const checks = [];
        
        switch (operationType) {
            case 'api_request':
                checks.push({
                    limit: this.limits.user.maxAPIRequests,
                    current: currentUsage.apiRequests || 0,
                    type: 'api_requests'
                });
                break;
                
            case 'file_upload':
                const uploadSize = operationData.fileSize || 0;
                checks.push({
                    limit: this.limits.user.maxUploadSize,
                    current: currentUsage.uploadSize || 0,
                    increment: uploadSize,
                    type: 'upload_size'
                });
                break;
                
            case 'database_query':
                checks.push({
                    limit: this.limits.user.maxDatabaseQueries,
                    current: currentUsage.databaseQueries || 0,
                    type: 'database_queries'
                });
                break;
        }
        
        // Validate each limit
        for (const check of checks) {
            const projectedUsage = check.current + (check.increment || 1);
            
            if (projectedUsage > check.limit) {
                return {
                    allowed: false,
                    reason: 'user_limit_exceeded',
                    limitType: check.type,
                    current: check.current,
                    limit: check.limit,
                    requested: check.increment || 1
                };
            }
        }
        
        return {
            allowed: true,
            limits: checks.map(check => ({
                type: check.type,
                current: check.current,
                limit: check.limit,
                remaining: check.limit - check.current
            }))
        };
    }
    
    // Track resource usage during operation execution
    async trackOperationExecution(operationId, userId, operationType) {
        const startTime = performance.now();
        const startMemory = process.memoryUsage();
        const startCPU = process.cpuUsage();
        
        const operation = {
            id: operationId,
            userId,
            type: operationType,
            startTime,
            startMemory,
            startCPU,
            isActive: true
        };
        
        this.activeOperations.set(operationId, operation);
        
        // Return tracking functions
        return {
            // Update operation progress
            updateProgress: (progress) => {
                if (this.activeOperations.has(operationId)) {
                    this.activeOperations.get(operationId).progress = progress;
                }
            },
            
            // Complete operation tracking
            complete: async () => {
                const endTime = performance.now();
                const endMemory = process.memoryUsage();
                const endCPU = process.cpuUsage(startCPU);
                
                const operationStats = {
                    duration: endTime - startTime,
                    memoryUsed: endMemory.heapUsed - startMemory.heapUsed,
                    cpuTime: (endCPU.user + endCPU.system) / 1000000 // Convert to seconds
                };
                
                // Update user usage statistics
                await this.updateUserUsage(userId, operationType, operationStats);
                
                // Remove from active operations
                this.activeOperations.delete(operationId);
                
                return operationStats;
            },
            
            // Abort operation if limits exceeded
            abort: async (reason) => {
                if (this.activeOperations.has(operationId)) {
                    this.activeOperations.get(operationId).aborted = true;
                    this.activeOperations.get(operationId).abortReason = reason;
                }
                
                // Log abort event
                await this.logResourceEvent({
                    type: 'operation_aborted',
                    operationId,
                    userId,
                    operationType,
                    reason,
                    timestamp: Date.now()
                });
            }
        };
    }
    
    // Implement circuit breaker pattern for resource protection
    async getCircuitBreakerStatus(resourceType) {
        const breakerKey = `circuit_breaker:${resourceType}`;
        const breakerData = await redisClient.get(breakerKey);
        
        if (!breakerData) {
            return {
                state: 'closed', // Normal operation
                failureCount: 0,
                lastFailure: null
            };
        }
        
        const breaker = JSON.parse(breakerData);
        const now = Date.now();
        
        // Check if breaker should be reset
        if (breaker.state === 'open' && 
            now - breaker.lastFailure > 60000) { // 1 minute timeout
            breaker.state = 'half-open';
            await redisClient.setex(breakerKey, 3600, JSON.stringify(breaker));
        }
        
        return breaker;
    }
    
    // Update circuit breaker state
    async updateCircuitBreaker(resourceType, success) {
        const breakerKey = `circuit_breaker:${resourceType}`;
        let breaker = await this.getCircuitBreakerStatus(resourceType);
        
        if (success) {
            if (breaker.state === 'half-open') {
                breaker.state = 'closed';
                breaker.failureCount = 0;
            }
        } else {
            breaker.failureCount++;
            breaker.lastFailure = Date.now();
            
            if (breaker.failureCount >= 5) { // Threshold
                breaker.state = 'open';
            }
        }
        
        await redisClient.setex(breakerKey, 3600, JSON.stringify(breaker));
        
        return breaker;
    }
}

// Secure operation execution with resource limits
class SecureOperationExecutor {
    constructor() {
        this.resourceManager = new ResourceLimitManager();
        this.operationQueue = [];
        this.maxConcurrentOperations = 100;
    }
    
    // Execute operation with comprehensive resource management
    async executeWithLimits(operationType, operationFn, userId, operationData = {}) {
        const operationId = require('crypto').randomUUID();
        
        try {
            // Check resource limits before execution
            const limitCheck = await this.resourceManager.checkResourceLimits(
                userId,
                operationType,
                operationData
            );
            
            if (!limitCheck.allowed) {
                throw new Error(`Resource limit exceeded: ${limitCheck.reason}`);
            }
            
            // Check circuit breaker
            const circuitBreaker = await this.resourceManager.getCircuitBreakerStatus(operationType);
            if (circuitBreaker.state === 'open') {
                throw new Error(`Service temporarily unavailable: ${operationType}`);
            }
            
            // Start resource tracking
            const tracker = await this.resourceManager.trackOperationExecution(
                operationId,
                userId,
                operationType
            );
            
            // Execute operation with timeout
            const timeoutMs = this.resourceManager.limits.operation.maxExecutionTime;
            
            const result = await Promise.race([
                operationFn(tracker),
                new Promise((_, reject) => 
                    setTimeout(() => reject(new Error('Operation timeout')), timeoutMs)
                )
            ]);
            
            // Complete tracking
            const operationStats = await tracker.complete();
            
            // Update circuit breaker - success
            await this.resourceManager.updateCircuitBreaker(operationType, true);
            
            return {
                success: true,
                result,
                operationStats,
                operationId
            };
            
        } catch (error) {
            // Update circuit breaker - failure
            await this.resourceManager.updateCircuitBreaker(operationType, false);
            
            // Abort tracking if it was started
            if (tracker) {
                await tracker.abort(error.message);
            }
            
            throw error;
        }
    }
}
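
Beyond the upload route shown earlier, the same executor can wrap any expensive operation. The following is a minimal usage sketch, assuming an Express app, the req.user convention used above, and hypothetical generateReportRows / buildReportDocument helpers:

const operationExecutor = new SecureOperationExecutor();

app.post('/api/reports', async (req, res) => {
    try {
        // Limits, the circuit breaker and resource tracking all apply to the wrapped work
        const outcome = await operationExecutor.executeWithLimits(
            'report_generation',
            async (tracker) => {
                const rows = await generateReportRows(req.body.filters); // hypothetical helper
                tracker.updateProgress(0.5);
                return buildReportDocument(rows); // hypothetical helper
            },
            req.user.id,
            { operation: 'report_generation', filters: req.body.filters }
        );

        res.json({ report: outcome.result, stats: outcome.operationStats });
    } catch (error) {
        const status = error.message.startsWith('Resource limit exceeded') ? 429 : 500;
        res.status(status).json({ error: error.message });
    }
});
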
2. Implement Secure File Upload and Storage Quota Management

Create a comprehensive file upload system that enforces storage quotas atomically, validates file content beyond extensions, implements streaming uploads with proper limits, and provides secure cleanup mechanisms.

// SECURE: Comprehensive file upload and storage quota management
const multer = require('multer');
const crypto = require('crypto');
const fs = require('fs').promises;
const path = require('path');
const stream = require('stream');
const { promisify } = require('util');
const pipeline = promisify(stream.pipeline);
const mongoose = require('mongoose');
// User and StorageLog are application models assumed to be registered elsewhere

class SecureFileUploadManager {
    constructor() {
        this.uploadLimits = {
            maxFileSize: 50 * 1024 * 1024, // 50MB per file
            maxTotalSize: 100 * 1024 * 1024, // 100MB per request
            maxFiles: 10,
            allowedMimeTypes: [
                'image/jpeg', 'image/png', 'image/webp',
                'application/pdf', 'text/plain',
                'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
            ],
            maxFilenameLength: 255,
            quarantineTime: 24 * 60 * 60 * 1000 // 24 hours
        };
        
        this.resourceManager = new ResourceLimitManager();
        this.virusScanner = new VirusScanner();
    }
    
    // Create secure multer configuration
    createSecureMulterConfig() {
        const storage = multer.memoryStorage(); // Use memory storage for better control
        
        return multer({
            storage,
            limits: {
                fileSize: this.uploadLimits.maxFileSize,
                files: this.uploadLimits.maxFiles,
                fieldSize: 1024 * 1024, // 1MB for field data
                fields: 20 // Maximum number of non-file fields
            },
            fileFilter: async (req, file, cb) => {
                try {
                    // Basic file validation
                    const validation = await this.validateFileBasics(file, req.user.id);
                    
                    if (!validation.isValid) {
                        return cb(new Error(validation.reason), false);
                    }
                    
                    cb(null, true);
                    
                } catch (error) {
                    cb(error, false);
                }
            }
        });
    }
    
    // Validate file basics before processing
    async validateFileBasics(file, userId) {
        // Check filename length
        if (file.originalname.length > this.uploadLimits.maxFilenameLength) {
            return {
                isValid: false,
                reason: 'Filename too long'
            };
        }
        
        // Check for dangerous filename patterns
        const dangerousPatterns = [
            /\.\./, // Path traversal
            /[<>:"|\?\*]/, // Invalid characters
            /^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i // Reserved names
        ];
        
        if (dangerousPatterns.some(pattern => pattern.test(file.originalname))) {
            return {
                isValid: false,
                reason: 'Invalid filename pattern'
            };
        }
        
        // Check MIME type
        if (!this.uploadLimits.allowedMimeTypes.includes(file.mimetype)) {
            return {
                isValid: false,
                reason: `MIME type ${file.mimetype} not allowed`
            };
        }
        
        // Check user storage quota. Note: when this runs inside multer's
        // fileFilter the file has not been buffered yet, so file.size may be
        // undefined; default to 0 here and re-check the real size later.
        const quotaCheck = await this.checkUserStorageQuota(userId, file.size || 0);
        if (!quotaCheck.allowed) {
            return {
                isValid: false,
                reason: quotaCheck.reason
            };
        }
        
        return { isValid: true };
    }
    
    // Check user storage quota atomically
    async checkUserStorageQuota(userId, additionalSize) {
        const lockKey = `storage_quota_lock:${userId}`;
        const lockValue = crypto.randomUUID();
        const lockTimeout = 30; // 30 seconds
        
        try {
            // Acquire distributed lock
            const lockAcquired = await this.acquireLock(lockKey, lockValue, lockTimeout);
            if (!lockAcquired) {
                return {
                    allowed: false,
                    reason: 'Unable to acquire storage lock'
                };
            }
            
            // Get current usage atomically
            const currentUsage = await this.getUserStorageUsage(userId);
            const storageLimit = await this.getUserStorageLimit(userId);
            
            if (currentUsage + additionalSize > storageLimit) {
                return {
                    allowed: false,
                    reason: 'Storage quota exceeded',
                    current: currentUsage,
                    limit: storageLimit,
                    requested: additionalSize
                };
            }
            
            return {
                allowed: true,
                current: currentUsage,
                limit: storageLimit,
                remaining: storageLimit - currentUsage - additionalSize
            };
            
        } finally {
            await this.releaseLock(lockKey, lockValue);
        }
    }
    
    // Process uploaded file with comprehensive security checks
    async processUploadedFile(file, userId, options = {}) {
        const operationId = crypto.randomUUID();
        let tracker = null;
        
        try {
            // Start resource tracking
            tracker = await this.resourceManager.trackOperationExecution(
                operationId,
                userId,
                'file_upload'
            );
            
            // Deep content validation
            const contentValidation = await this.validateFileContent(file);
            if (!contentValidation.isValid) {
                throw new Error(`Content validation failed: ${contentValidation.reason}`);
            }
            
            // Virus scanning
            const virusCheck = await this.virusScanner.scanBuffer(file.buffer);
            if (virusCheck.infected) {
                throw new Error(`Virus detected: ${virusCheck.signature}`);
            }
            
            // Re-check the storage quota against the actual buffered size
            // (the size is not reliably available inside multer's fileFilter)
            const sizeQuotaCheck = await this.checkUserStorageQuota(userId, file.size);
            if (!sizeQuotaCheck.allowed) {
                throw new Error(sizeQuotaCheck.reason);
            }
            
            // Generate secure file path
            const secureFilePath = await this.generateSecureFilePath(file, userId);
            
            // Write file atomically with integrity verification
            const fileInfo = await this.writeFileSecurely(file, secureFilePath, userId);
            
            // Update storage quota atomically
            await this.updateUserStorageUsage(userId, file.size, 'add');
            
            // Create file metadata record
            const fileRecord = await this.createFileRecord({
                ...fileInfo,
                userId,
                operationId,
                contentValidation,
                virusCheck
            });
            
            // Complete resource tracking
            const operationStats = await tracker.complete();
            
            return {
                success: true,
                fileId: fileRecord.id,
                filename: fileRecord.filename,
                size: fileRecord.size,
                contentType: fileRecord.contentType,
                operationStats
            };
            
        } catch (error) {
            // Clean up on error
            if (tracker) {
                await tracker.abort(error.message);
            }
            
            throw error;
        }
    }
    
    // Validate file content beyond MIME type
    async validateFileContent(file) {
        try {
            // Check file signature (magic bytes)
            const signature = await this.detectFileSignature(file.buffer);
            
            if (!this.isSignatureAllowed(signature, file.mimetype)) {
                return {
                    isValid: false,
                    reason: `File signature ${signature} doesn't match MIME type ${file.mimetype}`
                };
            }
            
            // Content-specific validation
            switch (signature) {
                case 'image/jpeg':
                case 'image/png':
                    return await this.validateImageContent(file.buffer);
                    
                case 'application/pdf':
                    return await this.validatePDFContent(file.buffer);
                    
                case 'text/plain':
                    return await this.validateTextContent(file.buffer);
                    
                default:
                    return { isValid: true };
            }
            
        } catch (error) {
            return {
                isValid: false,
                reason: `Content validation error: ${error.message}`
            };
        }
    }
    
    // Detect file signature from buffer
    async detectFileSignature(buffer) {
        const signatures = {
            'image/jpeg': [0xFF, 0xD8, 0xFF],
            'image/png': [0x89, 0x50, 0x4E, 0x47],
            'application/pdf': [0x25, 0x50, 0x44, 0x46],
            'application/zip': [0x50, 0x4B, 0x03, 0x04]
        };
        
        for (const [mimeType, sig] of Object.entries(signatures)) {
            if (buffer.length >= sig.length) {
                const match = sig.every((byte, index) => buffer[index] === byte);
                if (match) {
                    return mimeType;
                }
            }
        }
        
        return 'unknown';
    }
    
    // Write file securely with integrity verification
    async writeFileSecurely(file, filePath, userId) {
        try {
            // Create directory if it doesn't exist
            await fs.mkdir(path.dirname(filePath), { recursive: true });
            
            // Write file atomically
            const tempPath = `${filePath}.tmp`;
            await fs.writeFile(tempPath, file.buffer);
            
            // Verify file integrity
            const hash = crypto.createHash('sha256');
            hash.update(file.buffer);
            const originalHash = hash.digest('hex');
            
            const writtenBuffer = await fs.readFile(tempPath);
            const verifyHash = crypto.createHash('sha256');
            verifyHash.update(writtenBuffer);
            const writtenHash = verifyHash.digest('hex');
            
            if (originalHash !== writtenHash) {
                await fs.unlink(tempPath);
                throw new Error('File integrity verification failed');
            }
            
            // Atomic move to final location
            await fs.rename(tempPath, filePath);
            
            return {
                path: filePath,
                size: file.size,
                hash: originalHash,
                filename: file.originalname,
                contentType: file.mimetype
            };
            
        } catch (error) {
            // Clean up temp file if it exists
            try {
                await fs.unlink(`${filePath}.tmp`);
            } catch (cleanupError) {
                // Ignore cleanup errors
            }
            
            throw error;
        }
    }
    
    // Update user storage usage atomically
    async updateUserStorageUsage(userId, sizeChange, operation) {
        const lockKey = `storage_quota_lock:${userId}`;
        const lockValue = crypto.randomUUID();
        let session = null;
        
        try {
            const lockAcquired = await this.acquireLock(lockKey, lockValue, 30);
            if (!lockAcquired) {
                throw new Error('Unable to acquire storage lock for update');
            }
            
            session = await mongoose.startSession();
            await session.withTransaction(async () => {
                const user = await User.findById(userId).session(session);
                
                if (!user) {
                    throw new Error('User not found');
                }
                
                const currentUsage = user.storageUsage || 0;
                let newUsage;
                
                switch (operation) {
                    case 'add':
                        newUsage = currentUsage + sizeChange;
                        break;
                    case 'subtract':
                        newUsage = Math.max(0, currentUsage - sizeChange);
                        break;
                    default:
                        throw new Error('Invalid operation');
                }
                
                await User.findByIdAndUpdate(
                    userId,
                    {
                        storageUsage: newUsage,
                        lastStorageUpdate: new Date()
                    },
                    { session }
                );
                
                // Log storage change
                await StorageLog.create([{
                    userId,
                    operation,
                    sizeChange,
                    previousUsage: currentUsage,
                    newUsage,
                    timestamp: new Date()
                }], { session });
            });
            
        } finally {
            if (session) {
                await session.endSession();
            }
            await this.releaseLock(lockKey, lockValue);
        }
    }
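    
    // NOTE (sketch): acquireLock/releaseLock are used throughout this class but
    // were not shown in the original listing. A minimal Redis-based version is
    // sketched below; it assumes the shared redisClient used elsewhere in this
    // document and an ioredis-style client that supports eval().
    async acquireLock(lockKey, lockValue, timeoutSeconds) {
        // SET ... NX PX only succeeds if the key does not already exist
        const result = await redisClient.set(
            lockKey,
            lockValue,
            'PX',
            timeoutSeconds * 1000,
            'NX'
        );
        return result === 'OK';
    }
    
    // Release the lock only if we still own it (compare-and-delete via Lua)
    async releaseLock(lockKey, lockValue) {
        const releaseScript = `
            if redis.call("get", KEYS[1]) == ARGV[1] then
                return redis.call("del", KEYS[1])
            else
                return 0
            end
        `;
        return await redisClient.eval(releaseScript, 1, lockKey, lockValue);
    }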
}
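
The VirusScanner dependency instantiated in the constructor above is not part of the original listing. A minimal sketch follows, assuming the ClamAV clamscan binary is installed on the host; a production deployment would more likely call a managed scanning service.

// Hypothetical sketch: shell out to ClamAV's clamscan, which exits 0 when the
// file is clean and 1 when a signature matches.
const { execFile } = require('child_process');
const os = require('os');

class VirusScanner {
    async scanBuffer(buffer) {
        // Write the buffer to a temp file so the CLI scanner can read it
        const tempPath = path.join(os.tmpdir(), `scan-${crypto.randomUUID()}`);
        await fs.writeFile(tempPath, buffer);

        try {
            return await new Promise((resolve, reject) => {
                execFile('clamscan', ['--no-summary', tempPath], (error, stdout) => {
                    if (!error) {
                        return resolve({ infected: false, signature: null });
                    }
                    if (error.code === 1) {
                        // Output line looks like "<path>: <SignatureName> FOUND"
                        const match = stdout.match(/: (.+) FOUND/);
                        return resolve({
                            infected: true,
                            signature: match ? match[1] : 'unknown'
                        });
                    }
                    // Exit code 2 or spawn failure: fail closed by surfacing an error
                    return reject(new Error(`Virus scan failed: ${error.message}`));
                });
            });
        } finally {
            await fs.unlink(tempPath).catch(() => {});
        }
    }
}
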
3. Implement Smart Query Optimization and Database Resource Protection

Create an intelligent database query management system that analyzes query complexity, implements adaptive limits based on system load, and provides query optimization suggestions while preventing resource exhaustion attacks.

// SECURE: Smart query optimization and database resource protection
const mongoose = require('mongoose');
const { performance } = require('perf_hooks');
const crypto = require('crypto');

class SecureQueryManager {
    constructor() {
        this.queryLimits = {
            maxDocuments: 1000,
            maxQueryDepth: 5,
            maxPopulateDepth: 3,
            maxSortFields: 5,
            maxAggregationStages: 10,
            maxExecutionTime: 30000, // 30 seconds
            maxMemoryUsage: 256 * 1024 * 1024 // 256MB
        };
        
        this.queryCache = new Map();
        this.queryStats = new Map();
        this.adaptiveLimits = new Map();
        
        // Initialize adaptive limits based on system resources
        this.initializeAdaptiveLimits();
    }
    
    // Analyze and validate query before execution
    async analyzeQuery(queryData, userId) {
        try {
            const {
                collection,
                query,
                options = {},
                populate = [],
                aggregation = []
            } = queryData;
            
            // Generate query fingerprint for caching and tracking
            const queryFingerprint = this.generateQueryFingerprint(queryData);
            
            // Check query complexity
            const complexityAnalysis = await this.analyzeQueryComplexity({
                query,
                options,
                populate,
                aggregation
            });
            
            if (complexityAnalysis.score > 100) {
                return {
                    allowed: false,
                    reason: 'Query too complex',
                    complexity: complexityAnalysis,
                    suggestions: this.generateOptimizationSuggestions(complexityAnalysis)
                };
            }
            
            // Check user query quota
            const quotaCheck = await this.checkUserQueryQuota(userId);
            if (!quotaCheck.allowed) {
                return {
                    allowed: false,
                    reason: 'User query quota exceeded',
                    quota: quotaCheck
                };
            }
            
            // Check system load adaptive limits
            const adaptiveLimitCheck = await this.checkAdaptiveLimits(complexityAnalysis.score);
            if (!adaptiveLimitCheck.allowed) {
                return {
                    allowed: false,
                    reason: 'System under high load',
                    retryAfter: adaptiveLimitCheck.retryAfter
                };
            }
            
            return {
                allowed: true,
                queryFingerprint,
                complexity: complexityAnalysis,
                optimizedQuery: this.optimizeQuery(queryData, complexityAnalysis)
            };
            
        } catch (error) {
            return {
                allowed: false,
                reason: 'Query analysis failed',
                error: error.message
            };
        }
    }
    
    // Analyze query complexity
    async analyzeQueryComplexity(queryComponents) {
        let complexityScore = 0;
        const factors = [];
        
        // Analyze query conditions
        if (queryComponents.query) {
            const queryComplexity = this.analyzeQueryConditions(queryComponents.query);
            complexityScore += queryComplexity.score;
            factors.push(...queryComplexity.factors);
        }
        
        // Analyze sorting
        if (queryComponents.options.sort) {
            const sortComplexity = this.analyzeSortComplexity(queryComponents.options.sort);
            complexityScore += sortComplexity.score;
            factors.push(...sortComplexity.factors);
        }
        
        // Analyze population
        if (queryComponents.populate && queryComponents.populate.length > 0) {
            const populateComplexity = this.analyzePopulateComplexity(queryComponents.populate);
            complexityScore += populateComplexity.score;
            factors.push(...populateComplexity.factors);
        }
        
        // Analyze aggregation pipeline
        if (queryComponents.aggregation && queryComponents.aggregation.length > 0) {
            const aggregationComplexity = this.analyzeAggregationComplexity(queryComponents.aggregation);
            complexityScore += aggregationComplexity.score;
            factors.push(...aggregationComplexity.factors);
        }
        
        // Analyze limit and skip
        if (queryComponents.options.limit) {
            const limit = parseInt(queryComponents.options.limit);
            if (limit > this.queryLimits.maxDocuments) {
                complexityScore += 20;
                factors.push({
                    type: 'high_limit',
                    value: limit,
                    impact: 'high'
                });
            }
        }
        
        if (queryComponents.options.skip) {
            const skip = parseInt(queryComponents.options.skip);
            if (skip > 10000) {
                complexityScore += 15;
                factors.push({
                    type: 'high_skip',
                    value: skip,
                    impact: 'medium'
                });
            }
        }
        
        return {
            score: complexityScore,
            factors,
            category: this.categorizeComplexity(complexityScore)
        };
    }
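    
    // Worked example (illustrative, not from the original listing): for a
    // request with
    //   query:   { status: 'active', tags: { $in: [ /* 150 ids */ ] } }
    //   options: { limit: 5000, skip: 20000 }
    // the large $in contributes 10, the over-limit "limit" contributes 20 and
    // the deep "skip" contributes 15, giving a total complexity score of 45,
    // well below the threshold of 100 enforced in analyzeQuery above.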
    
    // Analyze query conditions complexity
    analyzeQueryConditions(query, depth = 0) {
        let score = 0;
        const factors = [];
        
        if (depth > this.queryLimits.maxQueryDepth) {
            score += 30;
            factors.push({
                type: 'deep_nesting',
                depth,
                impact: 'high'
            });
        }
        
        for (const [key, value] of Object.entries(query)) {
            switch (key) {
                case '$or':
                case '$and':
                    if (Array.isArray(value) && value.length > 5) {
                        score += 15;
                        factors.push({
                            type: 'complex_logical_operation',
                            operator: key,
                            conditions: value.length,
                            impact: 'medium'
                        });
                    }
                    // Recursively analyze nested conditions
                    for (const condition of value) {
                        const nested = this.analyzeQueryConditions(condition, depth + 1);
                        score += nested.score;
                        factors.push(...nested.factors);
                    }
                    break;
                    
                case '$regex':
                    score += 10;
                    factors.push({
                        type: 'regex_query',
                        impact: 'medium'
                    });
                    break;
                    
                case '$text':
                    score += 5;
                    factors.push({
                        type: 'text_search',
                        impact: 'low'
                    });
                    break;
                    
                case '$near':
                case '$geoNear':
                    score += 15;
                    factors.push({
                        type: 'geospatial_query',
                        impact: 'medium'
                    });
                    break;
                    
                default:
                    if (typeof value === 'object' && value !== null) {
                        // Check for range queries
                        if (value.$gte || value.$lte || value.$gt || value.$lt) {
                            score += 2;
                            factors.push({
                                type: 'range_query',
                                field: key,
                                impact: 'low'
                            });
                        }
                        
                        // Check for in queries with large arrays
                        if (value.$in && Array.isArray(value.$in) && value.$in.length > 100) {
                            score += 10;
                            factors.push({
                                type: 'large_in_query',
                                field: key,
                                values: value.$in.length,
                                impact: 'medium'
                            });
                        }
                    }
                    break;
            }
        }
        
        return { score, factors };
    }
    
    // Execute query with resource monitoring
    async executeSecureQuery(queryData, userId, analysis) {
        const queryId = crypto.randomUUID();
        const startTime = performance.now();
        const startMemory = process.memoryUsage();
        
        try {
            // Create query with timeout
            const timeoutPromise = new Promise((_, reject) => {
                setTimeout(() => {
                    reject(new Error('Query execution timeout'));
                }, this.queryLimits.maxExecutionTime);
            });
            
            // Execute query with monitoring
            const queryPromise = this.executeMonitoredQuery(queryData, queryId);
            
            const result = await Promise.race([queryPromise, timeoutPromise]);
            
            // Calculate execution metrics
            const endTime = performance.now();
            const endMemory = process.memoryUsage();
            
            const executionMetrics = {
                duration: endTime - startTime,
                memoryUsed: endMemory.heapUsed - startMemory.heapUsed,
                documentsReturned: Array.isArray(result) ? result.length : 1,
                complexity: analysis.complexity.score
            };
            
            // Update query statistics
            await this.updateQueryStatistics(userId, queryData, executionMetrics);
            
            // Update user query quota
            await this.updateUserQueryQuota(userId, executionMetrics);
            
            return {
                success: true,
                data: result,
                metrics: executionMetrics,
                queryId
            };
            
        } catch (error) {
            // Log query failure
            await this.logQueryFailure({
                queryId,
                userId,
                query: queryData,
                error: error.message,
                duration: performance.now() - startTime
            });
            
            throw error;
        }
    }
    
    // Execute query with resource monitoring
    async executeMonitoredQuery(queryData, queryId) {
        const {
            collection,
            query,
            options = {},
            populate = [],
            aggregation = []
        } = queryData;
        
        // Get model reference
        const Model = mongoose.model(collection);
        
        if (aggregation.length > 0) {
            // Execute aggregation pipeline
            return await Model.aggregate(aggregation)
                .allowDiskUse(false) // Prevent disk usage
                .option({ maxTimeMS: this.queryLimits.maxExecutionTime })
                .exec();
        } else {
            // Execute regular query
            let mongoQuery = Model.find(query);
            
            // Apply options
            if (options.sort) {
                mongoQuery = mongoQuery.sort(options.sort);
            }
            
            if (options.limit) {
                const limit = Math.min(
                    parseInt(options.limit),
                    this.queryLimits.maxDocuments
                );
                mongoQuery = mongoQuery.limit(limit);
            }
            
            if (options.skip) {
                mongoQuery = mongoQuery.skip(parseInt(options.skip));
            }
            
            if (options.select) {
                mongoQuery = mongoQuery.select(options.select);
            }
            
            // Apply population with depth limits
            for (const pop of populate) {
                if (this.validatePopulateDepth(pop)) {
                    mongoQuery = mongoQuery.populate(pop);
                }
            }
            
            // Set query timeout
            mongoQuery = mongoQuery.maxTimeMS(this.queryLimits.maxExecutionTime);
            
            return await mongoQuery.exec();
        }
    }
    
    // Optimize query based on complexity analysis
    optimizeQuery(queryData, complexityAnalysis) {
        const optimized = { ...queryData };
        
        // Apply automatic optimizations based on complexity factors
        for (const factor of complexityAnalysis.factors) {
            switch (factor.type) {
                case 'high_limit':
                    optimized.options = {
                        ...optimized.options,
                        limit: Math.min(factor.value, this.queryLimits.maxDocuments)
                    };
                    break;
                    
                case 'complex_logical_operation':
                    // Suggest using indexes
                    optimized.suggestions = optimized.suggestions || [];
                    optimized.suggestions.push(
                        `Consider adding compound index for ${factor.operator} operation`
                    );
                    break;
                    
                case 'regex_query':
                    optimized.suggestions = optimized.suggestions || [];
                    optimized.suggestions.push(
                        'Consider using text search instead of regex for better performance'
                    );
                    break;
            }
        }
        
        return optimized;
    }
    
    // Check adaptive limits based on system load
    async checkAdaptiveLimits(queryComplexity) {
        const systemLoad = await this.getSystemLoad();
        const loadThreshold = 0.8; // 80% load threshold
        
        if (systemLoad.cpu > loadThreshold || systemLoad.memory > loadThreshold) {
            // Reduce limits during high load
            const reducedLimits = {
                maxDocuments: Math.floor(this.queryLimits.maxDocuments * 0.5),
                maxExecutionTime: Math.floor(this.queryLimits.maxExecutionTime * 0.7),
                complexityThreshold: 50 // Reduced from 100
            };
            
            if (queryComplexity > reducedLimits.complexityThreshold) {
                return {
                    allowed: false,
                    reason: 'System under high load, complex queries temporarily restricted',
                    retryAfter: 60, // seconds (1 minute)
                    systemLoad
                };
            }
        }
        
        return { allowed: true };
    }
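    
    // NOTE (sketch): getSystemLoad and generateQueryFingerprint are referenced
    // above but were not shown in the original listing. Minimal versions are
    // sketched below; the load heuristic (1-minute load average over CPU count,
    // plus system memory pressure) is an assumption, not a prescription.
    async getSystemLoad() {
        const os = require('os');
        
        // Normalize the 1-minute load average by the number of CPUs (roughly 0..1+)
        const cpu = os.loadavg()[0] / os.cpus().length;
        
        // Approximate memory pressure as used / total system memory
        const memory = (os.totalmem() - os.freemem()) / os.totalmem();
        
        return { cpu, memory };
    }
    
    // Deterministic fingerprint of the query shape for caching and statistics
    generateQueryFingerprint(queryData) {
        return crypto
            .createHash('sha256')
            .update(JSON.stringify(queryData))
            .digest('hex');
    }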
}
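
A minimal usage sketch of the query manager, assuming an Express app, the req.user convention from the earlier endpoints, and a hypothetical whitelist of queryable collections (letting clients name arbitrary models would widen the attack surface):

const queryManager = new SecureQueryManager();
const QUERYABLE_COLLECTIONS = ['Article', 'Comment']; // hypothetical whitelist

app.post('/api/query', async (req, res) => {
    try {
        // Never let clients target arbitrary models
        if (!QUERYABLE_COLLECTIONS.includes(req.body.collection)) {
            return res.status(400).json({ error: 'Collection not queryable' });
        }

        const queryData = {
            collection: req.body.collection,
            query: req.body.query || {},
            options: req.body.options || {},
            populate: req.body.populate || [],
            aggregation: req.body.aggregation || []
        };

        // Complexity, quota and adaptive-load checks before touching the database
        const analysis = await queryManager.analyzeQuery(queryData, req.user.id);
        if (!analysis.allowed) {
            return res.status(429).json({
                error: analysis.reason,
                retryAfter: analysis.retryAfter,
                suggestions: analysis.suggestions
            });
        }

        // Execute the optimized query under timeout and memory monitoring
        const outcome = await queryManager.executeSecureQuery(
            analysis.optimizedQuery,
            req.user.id,
            analysis
        );

        res.json({ data: outcome.data, metrics: outcome.metrics });
    } catch (error) {
        res.status(500).json({ error: 'Query execution failed' });
    }
});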

Detect This Vulnerability in Your Code

Sourcery automatically identifies resource limit and quota bypass vulnerabilities in application systems and many other security issues in your codebase.