Performance Optimization Examples
Here are some Sharp performance optimization examples and best practices.
Memory Optimization
Stream Processing Large Files
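The minimal .pipe() chain below keeps memory flat, but errors from any of the three streams are easy to miss. As a sketch, the same flow can be wrapped in pipeline() from stream/promises (Node 15+ assumed), which rejects if reading, resizing or writing fails:
javascript
import sharp from 'sharp';
import fs from 'fs';
import { pipeline } from 'stream/promises';
// pipeline() wires the streams together and surfaces any failure as a rejected promise
await pipeline(
fs.createReadStream('large-image.jpg'),
sharp().resize(800, 600),
fs.createWriteStream('output.jpg')
);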
javascript
import sharp from 'sharp';
import fs from 'fs';
// Stream processing, avoid loading the entire file into memory
fs.createReadStream('large-image.jpg')
.pipe(sharp().resize(800, 600))
.pipe(fs.createWriteStream('output.jpg'));

Use Buffer Instead of File
javascript
// For small files, using Buffer is more efficient
const inputBuffer = fs.readFileSync('input.jpg');
const outputBuffer = await sharp(inputBuffer)
.resize(300, 200)
.jpeg({ quality: 80 })
.toBuffer();
fs.writeFileSync('output.jpg', outputBuffer);

Release Resources Promptly
javascript
// Release resources after processing
const image = sharp('input.jpg');
await image.resize(300, 200).toFile('output.jpg');
// the image instance will be garbage collected automatically

Concurrency Control
Limit Concurrency
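sharp.concurrency() called without an argument returns the current libvips thread count, and passing 0 resets it to the default of one thread per CPU core. The example below then also limits concurrency at the application level:
javascript
// Read the current thread pool size, then restore the default
console.log(sharp.concurrency());
sharp.concurrency(0); // 0 = number of CPU cores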
javascript
// Set maximum concurrency
sharp.concurrency(4);
// Control concurrency during batch processing
async function batchProcess(files) {
const batchSize = 4;
const results = [];
for (let i = 0; i < files.length; i += batchSize) {
const batch = files.slice(i, i + batchSize);
const batchPromises = batch.map(file =>
sharp(file).resize(300, 200).jpeg().toFile(`output_${file}`)
);
await Promise.all(batchPromises);
results.push(...batch);
}
return results;
}

Use Queue Processing
javascript
class ImageProcessor {
constructor(concurrency = 4) {
this.concurrency = concurrency;
this.queue = [];
this.running = 0;
}
async add(task) {
return new Promise((resolve, reject) => {
this.queue.push({ task, resolve, reject });
this.process();
});
}
async process() {
if (this.running >= this.concurrency || this.queue.length === 0) {
return;
}
this.running++;
const { task, resolve, reject } = this.queue.shift();
try {
const result = await task();
resolve(result);
} catch (error) {
reject(error);
} finally {
this.running--;
this.process();
}
}
}
// Usage example: add() returns a promise, so completion of all tasks can be awaited
const processor = new ImageProcessor(4);
await Promise.all(files.map(file =>
processor.add(() => sharp(file).resize(300, 200).jpeg().toFile(`output_${file}`))
));

Cache Optimization
Cache Processing Results
javascript
const cache = new Map();
async function processWithCache(inputPath, width, height) {
const key = `${inputPath}_${width}_${height}`;
if (cache.has(key)) {
return cache.get(key);
}
const result = await sharp(inputPath)
.resize(width, height)
.jpeg({ quality: 80 })
.toBuffer();
cache.set(key, result);
return result;
}

Clear Sharp Cache
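sharp.cache() also accepts explicit limits, which can be a gentler alternative to disabling the cache outright as the next example does. A minimal sketch, with illustrative values close to the documented defaults:
javascript
// Cap the libvips operation cache: ~50 MB of memory, 20 files, 100 operations
sharp.cache({ memory: 50, files: 20, items: 100 });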
javascript
// Disable the libvips operation cache to release the memory it holds
// (a single sharp.cache(false) call is enough; the interval here is only illustrative)
setInterval(() => {
sharp.cache(false);
}, 60000); // every minute

Algorithm Selection
Select Appropriate Resize Algorithm
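lanczos3 is sharp's default kernel. For small previews where speed matters far more than quality, the cheapest kernel, nearest, is also available; a rough sketch before the shrink/enlarge examples below:
javascript
// Fastest kernel; expect visibly lower quality
await sharp('input.jpg')
.resize(150, 100, { kernel: sharp.kernel.nearest })
.toFile('thumbnail.jpg');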
javascript
// Use faster algorithm for shrinking
await sharp('input.jpg')
.resize(300, 200, { kernel: sharp.kernel.cubic })
.toFile('output.jpg');
// Use higher quality algorithm for enlarging
await sharp('input.jpg')
.resize(1200, 800, { kernel: sharp.kernel.lanczos3 })
.toFile('output.jpg');

Batch Processing Optimization
javascript
async function optimizedBatchProcess(files) {
// Group by size
const smallFiles = [];
const largeFiles = [];
for (const file of files) {
const metadata = await sharp(file).metadata();
if (metadata.width * metadata.height < 1000000) {
smallFiles.push(file);
} else {
largeFiles.push(file);
}
}
// Use fast algorithm for small files
await Promise.all(smallFiles.map(file =>
sharp(file)
.resize(300, 200, { kernel: sharp.kernel.cubic })
.jpeg({ quality: 80 })
.toFile(`output_${file}`)
));
// Use higher quality algorithm for large files
await Promise.all(largeFiles.map(file =>
sharp(file)
.resize(800, 600, { kernel: sharp.kernel.lanczos3 })
.jpeg({ quality: 90 })
.toFile(`output_${file}`)
));
}

Network Optimization
Streaming Response
javascript
// Express.js example
app.get('/image/:filename', async (req, res) => {
const filename = req.params.filename;
try {
const imageStream = sharp(`images/${filename}`)
.resize(300, 200)
.jpeg({ quality: 80 });
res.set('Content-Type', 'image/jpeg');
// A missing or corrupt file surfaces as an asynchronous stream error, not a thrown exception
imageStream.on('error', () => res.status(404).send('Image not found'));
imageStream.pipe(res);
} catch (error) {
res.status(404).send('Image not found');
}
});

Conditional Processing
javascript
app.get('/image/:filename', async (req, res) => {
const { filename } = req.params;
const { width, height, quality = 80 } = req.query;
try {
let image = sharp(`images/${filename}`);
if (width || height) {
// Pass null for a missing dimension so the aspect ratio is preserved (parseInt(undefined) would be NaN)
image = image.resize(width ? parseInt(width, 10) : null, height ? parseInt(height, 10) : null);
}
if (req.headers.accept?.includes('image/webp')) {
image = image.webp({ quality: parseInt(quality) });
res.set('Content-Type', 'image/webp');
} else {
image = image.jpeg({ quality: parseInt(quality) });
res.set('Content-Type', 'image/jpeg');
}
// Handle asynchronous stream errors (e.g. missing file) as well
image.on('error', () => res.status(404).send('Image not found'));
image.pipe(res);
} catch (error) {
res.status(404).send('Image not found');
}
});

Monitoring and Debugging
Performance Monitoring
javascript
async function processWithTiming(inputPath, outputPath) {
const startTime = Date.now();
try {
await sharp(inputPath)
.resize(800, 600)
.jpeg({ quality: 80 })
.toFile(outputPath);
const endTime = Date.now();
console.log(`Processing time: ${endTime - startTime}ms`);
} catch (error) {
console.error('Processing failed:', error.message);
}
}

Memory Usage Monitoring
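Node's process.memoryUsage() in the next example reports process-wide memory; sharp also exposes its own cache and counter statistics. A brief sketch (the exact shape of the returned objects may vary between versions):
javascript
// Libvips operation cache usage/limits and internal task counters
console.log(sharp.cache());
console.log(sharp.counters()); // e.g. { queue, process }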
javascript
// process is a Node.js global, so no import is needed
function logMemoryUsage() {
const usage = process.memoryUsage();
console.log('Memory usage:', {
rss: `${Math.round(usage.rss / 1024 / 1024)} MB`,
heapTotal: `${Math.round(usage.heapTotal / 1024 / 1024)} MB`,
heapUsed: `${Math.round(usage.heapUsed / 1024 / 1024)} MB`,
external: `${Math.round(usage.external / 1024 / 1024)} MB`
});
}
// Record memory usage before and after processing
logMemoryUsage();
await sharp('input.jpg').resize(800, 600).toFile('output.jpg');
logMemoryUsage();

Error Handling and Retry
Retry Mechanism
javascript
async function processWithRetry(inputPath, outputPath, maxRetries = 3) {
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
await sharp(inputPath)
.resize(800, 600)
.jpeg({ quality: 80 })
.toFile(outputPath);
console.log(`Processing successful, attempt: ${attempt}`);
return;
} catch (error) {
console.error(`Attempt ${attempt} failed:`, error.message);
if (attempt === maxRetries) {
throw new Error(`Processing failed, retried ${maxRetries} times`);
}
// Wait for a while before retrying
await new Promise(resolve => setTimeout(resolve, 1000 * attempt));
}
}
}

Error Classification
javascript
async function robustProcess(inputPath, outputPath) {
try {
await sharp(inputPath)
.resize(800, 600)
.jpeg({ quality: 80 })
.toFile(outputPath);
} catch (error) {
// Sharp errors are plain Errors without structured codes, so match on the message text
if (error.message.includes('unsupported image format')) {
console.error('Unsupported image format');
} else if (error.message.includes('exceeds pixel limit')) {
console.error('Image exceeds the pixel limit, retrying with the limit lifted');
// Retry with the input pixel limit removed and a smaller output size
await sharp(inputPath, { limitInputPixels: false })
.resize(400, 300)
.jpeg({ quality: 80 })
.toFile(outputPath);
} else if (error.code === 'ENOSPC') {
console.error('Insufficient disk space');
} else {
console.error('Unknown error:', error.message);
}
}
}

Best Practices Summary
1. Select Appropriate Processing Method
javascript
const fileSize = fs.statSync(file).size;
// Small file (< 1 MB): process directly
if (fileSize < 1024 * 1024) {
await sharp(file).resize(300, 200).toFile(output);
}
// Large file: streaming processing
else {
fs.createReadStream(file)
.pipe(sharp().resize(300, 200))
.pipe(fs.createWriteStream(output));
}

2. Batch Processing Optimization
javascript
// Use Promise.all for concurrent processing
const promises = files.map(file =>
sharp(file).resize(300, 200).jpeg().toFile(`output_${file}`)
);
await Promise.all(promises);

3. Memory Management
javascript
// Clear cache periodically
setInterval(() => {
sharp.cache(false);
}, 300000); // every 5 minutes

4. Error Handling
javascript
// Always use try-catch
try {
await sharp(input).resize(300, 200).toFile(output);
} catch (error) {
console.error('Processing failed:', error.message);
// Provide a backup plan
}
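One possible backup plan for the catch branch above (a hypothetical choice, not something Sharp prescribes) is to fall back to the unprocessed original:
javascript
try {
await sharp(input).resize(300, 200).toFile(output);
} catch (error) {
console.error('Processing failed:', error.message);
// Hypothetical fallback: deliver the original file unchanged
await fs.promises.copyFile(input, output);
}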