AWS Lambda script to compress every S3 file
Automatically compresses every uploaded file and creates a new object with the
same Content-Type and the original filename plus a .gz suffix.
const aws = require('aws-sdk');
const zlib = require('zlib');

const s3 = new aws.S3();

// Promisified gzip at maximum compression (level 9).
const compress = (data) => {
  return new Promise((resolve, reject) => {
    zlib.gzip(data, { level: 9 }, (err, compressed) => {
      if (err) {
        reject(err);
      } else {
        resolve(compressed);
      }
    });
  });
};

const makeTask = async (record) => {
  try {
    const srcBucket = record.s3.bucket.name;
    // Object keys in S3 events are URL-encoded, with '+' standing in for spaces.
    const srcKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, ' '));

    // Write the compressed copy back to the same bucket, with a '.gz' suffix.
    const destBucket = srcBucket;
    const destKey = srcKey + '.gz';

    const { Body, ContentType } = await s3.getObject({ Bucket: srcBucket, Key: srcKey }).promise();
    const compressed = await compress(Body);

    await s3.putObject({
      Bucket: destBucket,
      Key: destKey,
      Body: compressed,
      ContentType: ContentType, // preserve the original Content-Type
      ContentEncoding: 'gzip'
    }).promise();

    return 'Success: ' + destBucket + '/' + destKey;
  } catch (err) {
    console.log('Error: ', err);
    throw err; // rethrow so the handler's Promise.all rejects on failure
  }
};

exports.handler = async (event) => {
  // Skip objects whose key already ends in '.gz'. Since the output lands in
  // the same bucket, this guard also keeps the function from retriggering
  // itself on its own output. (S3 event records do not carry ContentEncoding,
  // so the key suffix is the only signal available here.)
  const records = event.Records.filter(
    r => !r.s3.object.key.replace(/\+/g, ' ').endsWith('.gz')
  );
  // With an async handler, returning the promise replaces the legacy
  // context.succeed()/context.fail() calls.
  return Promise.all(records.map(makeTask));
};
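For a quick local check, the handler can be driven with a hand-built event before wiring up the real S3 trigger. The sketch below assumes the script above is saved as index.js; the bucket and key names are placeholders, and AWS credentials with access to them must be configured for the getObject/putObject calls to succeed.

// local-test.js -- a minimal sketch of invoking the handler with a fake S3 event.
const { handler } = require('./index'); // assumes the script above is index.js

const event = {
  Records: [
    {
      s3: {
        bucket: { name: 'my-example-bucket' },     // placeholder bucket
        object: { key: 'uploads/report+2024.csv' } // '+' decodes to a space
      }
    }
  ]
};

handler(event)
  .then(results => console.log(results))
  .catch(err => console.error('failed:', err));

If the run succeeds, the bucket should contain uploads/report 2024.csv.gz alongside the original; because the copy is stored with ContentEncoding: gzip, clients that honor that header will decompress it transparently on download.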