// processing-function/index.js
const { Storage } = require('@google-cloud/storage');
const { Firestore } = require('@google-cloud/firestore');
const sharp = require('sharp');
const path = require('path');
const os = require('os');
const fs = require('fs').promises;
const storage = new Storage();
const firestore = new Firestore();
// Environment variables
const inputBucket = process.env.INPUT_BUCKET;
const outputBucket = process.env.OUTPUT_BUCKET;
const sizeConfigs = parseSizeConfigs(process.env.SIZES || 'thumbnail=150x150');
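// Example SIZES value (illustrative; only the thumbnail default comes from this config):
//   SIZES="thumbnail=150x150,medium=800x600,large=1920x1080"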
/**
 * Process an image when a message is published to Pub/Sub
 */
exports.processImage = async (message, context) => {
  // Decode the Pub/Sub message
  const data = message.data
    ? JSON.parse(Buffer.from(message.data, 'base64').toString())
    : {};

  const { fileName, bucket, contentType, metadata } = data;

  if (!fileName || !bucket || !contentType) {
    console.error('Missing required message data');
    return;
  }

  const tempLocalPath = path.join(os.tmpdir(), path.basename(fileName));
  const outputPaths = [];

  try {
    // Download the original file
    await storage.bucket(bucket).file(fileName).download({ destination: tempLocalPath });
    console.log(`Downloaded ${fileName} to ${tempLocalPath}`);

    // Process each configured size
    for (const [sizeName, dimensions] of Object.entries(sizeConfigs)) {
      const outputFileName = generateOutputFileName(fileName, sizeName);
      // Write to a flat temp path: outputFileName includes a size prefix
      // (e.g. "thumbnail/") that does not exist as a directory under /tmp
      const tempOutputPath = path.join(os.tmpdir(), path.basename(outputFileName));

      // Resize the image
      await sharp(tempLocalPath)
        .resize({
          width: dimensions.width,
          height: dimensions.height,
          fit: sharp.fit.cover,
          position: sharp.strategy.attention
        })
        .toFile(tempOutputPath);
      // Upload the resized image
      await storage.bucket(outputBucket).upload(tempOutputPath, {
        destination: outputFileName,
        metadata: {
          contentType,
          metadata: {
            originalImage: fileName,
            sizeName,
            width: dimensions.width.toString(),
            height: dimensions.height.toString()
          }
        }
      });

      // Clean up temp file
      await fs.unlink(tempOutputPath);

      // Record the output version
      outputPaths.push({
        sizeName,
        path: outputFileName,
        width: dimensions.width,
        height: dimensions.height,
        bucket: outputBucket
      });
    }
    // Optimize the original and upload as the "full" size
    // (note: this re-encodes as JPEG, so it assumes JPEG input)
    const optimizedPath = path.join(os.tmpdir(), `optimized-${path.basename(fileName)}`);
    await sharp(tempLocalPath)
      .withMetadata()
      .jpeg({ quality: 85 })
      .toFile(optimizedPath);

    await storage.bucket(outputBucket).upload(optimizedPath, {
      destination: `full/${path.basename(fileName)}`,
      metadata: {
        contentType,
        metadata: {
          originalImage: fileName,
          sizeName: 'full',
          optimized: 'true'
        }
      }
    });

    // Add the full size to the outputs; the message's metadata block is
    // optional, so guard against it being missing
    outputPaths.push({
      sizeName: 'full',
      path: `full/${path.basename(fileName)}`,
      width: metadata?.width || null,
      height: metadata?.height || null,
      bucket: outputBucket
    });
    // Update metadata in Firestore
    await firestore.collection('images')
      .doc(fileName.replace(/[\/\.]/g, '_'))
      .update({
        status: 'processed',
        processedAt: new Date(),
        outputVersions: outputPaths
      });

    console.log(`Successfully processed ${fileName} into ${outputPaths.length} sizes`);

    // Clean up the temp files
    await fs.unlink(tempLocalPath);
    await fs.unlink(optimizedPath);
  } catch (error) {
    console.error(`Error processing image ${fileName}:`, error);

    // Best-effort cleanup so failed invocations don't leak files in /tmp
    await fs.unlink(tempLocalPath).catch(() => {});

    // Update Firestore with the error
    await firestore.collection('images')
      .doc(fileName.replace(/[\/\.]/g, '_'))
      .update({
        status: 'error',
        errorMessage: error.message,
        errorAt: new Date()
      });
  }
};
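// Local smoke test (a sketch only; assumes application default credentials, that
// the hypothetical 'my-input-bucket' exists, and that 'uploads/cat.jpg' has
// already been uploaded to it):
//
//   const payload = {
//     fileName: 'uploads/cat.jpg',
//     bucket: 'my-input-bucket',
//     contentType: 'image/jpeg',
//     metadata: { width: 4000, height: 3000 }
//   };
//   exports.processImage(
//     { data: Buffer.from(JSON.stringify(payload)).toString('base64') },
//     {}
//   );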
/**
 * Parse size configurations from environment variable
 * Format: "name=widthxheight,name2=widthxheight"
 */
function parseSizeConfigs(sizesString) {
  const configs = {};
  sizesString.split(',').forEach(sizeConfig => {
    const [name, dimensions] = sizeConfig.trim().split('=');
    if (name && dimensions) {
      const [width, height] = dimensions.split('x').map(Number);
      if (width && height) {
        configs[name] = { width, height };
      }
    }
  });
  return configs;
}
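// Example (illustrative values):
//   parseSizeConfigs('thumbnail=150x150,medium=800x600')
//   // => { thumbnail: { width: 150, height: 150 }, medium: { width: 800, height: 600 } }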
/**
 * Generate output file name based on original and size name
 */
function generateOutputFileName(originalName, sizeName) {
  const ext = path.extname(originalName);
  const baseName = path.basename(originalName, ext);
  return `${sizeName}/${baseName}-${sizeName}${ext}`;
}
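// Example (hypothetical file name):
//   generateOutputFileName('uploads/cat.jpg', 'thumbnail')
//   // => 'thumbnail/cat-thumbnail.jpg'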