I'm a beginner with Node.js, so please bear with me. The Lambda function below zips files in S3 and uploads the resulting archive back to S3. listOfKeys holds the list of keys to be zipped. Notice the for (const file in listOfKeys) loop: when the dataset is large, i.e. listOfKeys is a long list of keys, the loop runs synchronously and the Lambda times out. The question: is there a way to run the loop asynchronously or in parallel, so the files are zipped in time, i.e. concurrently?
Code:
const AWS = require('aws-sdk');
const archiver = require('archiver');
const stream = require('stream');
const request = require('request');
const awsOptions = {
    region: 'us-east-1'
};
const s3 = new AWS.S3(awsOptions);
const streamTo = (bucket, key) => {};   // returns a writable stream that uploads the archive to S3 (implementation elided)
const getStream = (bucket, key) => {};  // returns a readable stream for an S3 object (implementation elided)
exports.handler = async (event, context, callback) => {
    const listOfKeys = [];
    // some code here..
    listOfKeys.push(...contents.map(x => x.Key));
    await new Promise((resolve, reject) => {
        const zipStream = streamTo(SrcBucket, destinationKey);
        zipStream.on("close", resolve);
        zipStream.on("end", resolve);
        zipStream.on("error", reject);
        const archive = archiver("zip");
        // reject the promise rather than throwing inside the event handler,
        // otherwise the error never propagates out of the promise
        archive.on("error", reject);
        archive.pipe(zipStream);
        // the loop in question: keys are appended one after another
        for (const file in listOfKeys) {
            archive.append(getStream(SrcBucket, listOfKeys[file]), { name: listOfKeys[file].split('/')[3] });
        }
        archive.finalize();
    });
    callback(null, {
        body: { final_destination: destinationKey }
    });
};
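For reference, the two helpers are only stubbed above; a minimal sketch of what they might look like, assuming streamTo uploads through a stream.PassThrough via s3.upload and getStream reads the object with s3.getObject().createReadStream():

const streamTo = (bucket, key) => {
    const passThrough = new stream.PassThrough();
    // s3.upload consumes the pass-through stream as the object body
    s3.upload(
        { Bucket: bucket, Key: key, Body: passThrough, ContentType: 'application/zip' },
        err => { if (err) passThrough.emit('error', err); }
    );
    return passThrough;
};

const getStream = (bucket, key) =>
    s3.getObject({ Bucket: bucket, Key: key }).createReadStream();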
Array.prototype.forEach calls a provided function once for each element of the array:

const array1 = ['a', 'b', 'c'];
array1.forEach(element => console.log(element));
// expected output: "a"
// expected output: "b"
// expected output: "c"
So your code should be:

listOfKeys.forEach(file => {
    archive.append(getStream(SrcBucket, file), { name: file.split('/')[3] })
})
(Not sure if this will work; let me know.)
Source: Array.prototype.forEach() on MDN
I would probably rewrite the whole thing to be more flexible, but to answer your specific question: replace the listOfKeys.forEach statement with:
await Promise.all(
    listOfKeys.map(key =>
        archive.append(getStream(SrcBucket, key), { name: key.split('/')[3] })
    )
);
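One caveat: archiver's append is synchronous and returns the archive instance, not a promise, so the Promise.all above resolves immediately and the entries are still drained one at a time. A minimal sketch that actually overlaps the S3 downloads, assuming the objects fit in the Lambda's memory (getObjectBuffer is a hypothetical helper, not part of the question's code):

const getObjectBuffer = (bucket, key) =>
    s3.getObject({ Bucket: bucket, Key: key }).promise().then(res => res.Body);

// download all objects concurrently, then append the buffers in order
const buffers = await Promise.all(
    listOfKeys.map(key => getObjectBuffer(SrcBucket, key))
);
buffers.forEach((body, i) => {
    archive.append(body, { name: listOfKeys[i].split('/')[3] });
});
archive.finalize();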