[英]Convert Firebase Function to Async/Await
The code below writes only the CSV header row to the output file. 下面的代码只把表头行写进了 csv 文件。 I assume this is because the `get` requests inside the function are asynchronous, so the function does not wait for the data to be retrieved. How can I refactor this to await the result of the second forEach loop?
我猜这是因为 function 内的 get 请求是异步的,因此 function 没有等待数据取回。我该如何重构,才能等待第二个 forEach 循环的结果?
newApp.get('/createUtilCSV', (req, res) => {
try {
const tempFilePath = path.join(os.tmpdir(), "Util_" + req.query.start + "<->" + req.query.end + ".csv");
var fromDate = new Date(req.query.start)
var toDate = new Date(req.query.end)
var fromD = admin.firestore.Timestamp.fromDate(fromDate);
var toD = admin.firestore.Timestamp.fromDate(toDate);
firestore.collection('utilisation')
.where("timestamp", ">=", fromD)
.where("timestamp", "<=", toD)
.get()
.then(snapshot => {
var output = fs.createWriteStream(tempFilePath);
var archive = archiver('zip', {
gzip: true,
zlib: {
level: 9
}
});
var utilText = "App Version,Author,Building,City,Country,Date,Floor ID,Floor Name, Floor Region,Bookable,Business Unit,Capacity,Fixed,Furniture Height,Occupancy,Position X,Position Y,Restricted,Status Date,Status Type,Unique Ref,Workspace Type,Report ID\n";
let utils = snapshot.docs;
console.log("util info: " + utils);
utils.forEach(doc => {
var line = doc.data();
var utilLine = "";
utilLine = utilLine + line.appVersion + ",";
utilLine = utilLine + line.authorEmail + ",";
utilLine = utilLine + line.building + ",";
utilLine = utilLine + line.city + ",";
utilLine = utilLine + line.country + ",";
utilLine = utilLine + line.timestamp.toDate() + ",";
utilLine = utilLine + line.floorId + ",";
utilLine = utilLine + line.floorName + ",";
utilLine = utilLine + line.region + ",";
firestore.collection('utilisation')
.doc(line.documentID)
.collection('workspaces')
.get()
.then( snap => {
let workspaces = snap.docs;
workspaces.forEach(space => {
var lineSpace = space.data();
var spaceAdd = utilLine;
spaceAdd = spaceAdd + lineSpace.bookable + ",";
spaceAdd = spaceAdd + lineSpace.businessUnit + ",";
spaceAdd = spaceAdd + lineSpace.capacity + ",";
spaceAdd = spaceAdd + lineSpace.fixed + ",";
spaceAdd = spaceAdd + lineSpace.height + ",";
spaceAdd = spaceAdd + lineSpace.occupancy + ",";
spaceAdd = spaceAdd + lineSpace.positionX + ",";
spaceAdd = spaceAdd + lineSpace.positionY + ",";
spaceAdd = spaceAdd + lineSpace.restricted + ",";
spaceAdd = spaceAdd + lineSpace.statusDate.toDate() + ",";
spaceAdd = spaceAdd + lineSpace.statusType + ",";
spaceAdd = spaceAdd + lineSpace.uniqueRef + ",";
spaceAdd = spaceAdd + lineSpace.workspaceType + ",";
spaceAdd = spaceAdd + line.documentID + "\n";
utilText = utilText + spaceAdd;
});
});
});
try {
// listen for all archive data to be written
// 'close' event is fired only when a file descriptor is involved
output.on('close', function() {
console.log(archive.pointer() + ' total bytes');
console.log('archiver has been finalized and the output file descriptor has closed.');
console.log("Util Text: " + utilText);
bucket.upload(tempFilePath, {
make_public: true,
gzip: true,
destination: tempFilePath,
metadata: metadata,
});
//Let's get the signed URL
const file = admin.storage().bucket().file(tempFilePath);
console.log('File: ' + file);
var expiryDate = new Date();
expiryDate.setDate(expiryDate.getDate() + 1);
file.getSignedUrl({
action: 'read',
expires: expiryDate
}).then(urls => {
const signedUrl = urls[0];
console.log('Send: ' + signedUrl);
res.redirect(signedUrl);
res.end();
});
});
output.on('end', function() {
console.log('Data has been drained');
});
// good practice to catch warnings (ie stat failures and other non-blocking errors)
archive.on('warning', function(err) {
if (err.code === 'ENOENT') {
// log warning
} else {
// throw error
throw err;
}
});
// good practice to catch this error explicitly
archive.on('error', function(err) {
throw err;
});
const metadata = {
contentType: "application/zip",
};
archive.pipe(output);
// append a file from string
archive.append(utilText, {
name: tempFilePath
});
archive.finalize();
console.log("tempFilePath: " + tempFilePath);
console.log("Export Util CSV finished");
} catch (err) {
console.log(err);
}
})
.catch(err => console.log(err));
} catch (error) {
console.log("Export error:" + error);
};
});
You could enclose it in an async function and use await. 您可以将其放进一个 async function 中并使用 await。 If you want to read the documents in sequence, you cannot use forEach — an await inside a forEach callback is ignored. 如果要按顺序读取文档,就不能使用 forEach——forEach 回调里的 await 会被忽略。 Just use a modern for…of loop instead, in which await works as expected: 改用现代的 for…of 循环即可,其中 await 会按预期工作:
const utilisation = async () => {
let utilisationRef = db.collection('utilisation')
.where("timestamp", ">=", fromD)
.where("timestamp", "<=", toD)
let allUtilisations = await utilisationRef.get();
// ... some code
for(const doc of allUtilisations.docs){
console.log(doc.data());
// ... some code
}
}
utilisation();
Or you can even use .map to flatten each document first: 或者你也可以先用 .map 把每个文档展平:
const utilisation = async () => {
let utilisationRef = db.collection('utilisation')
.where("timestamp", ">=", fromD)
.where("timestamp", "<=", toD)
let allUtilisations = await utilisationRef.get();
const documents = allUtilisations.docs.map((doc) => ({ id: doc.id, ...doc.data() }));
// ... some code
for(const doc of documents){
console.log(doc);
// ... some code
}
}
utilisation();
Using the pattern above, you can now refactor your code — including the second, nested loop. 按照上面的模式,您现在可以重构自己的代码,包括第二个嵌套循环。
Thanks to everyone who offered advice. I have refined the code, and the following now works as expected: 感谢所有提供建议的人。我已经完善了这段代码,下面的版本现在可以按预期工作:
async function utilCSV(req, res) { const tempFilePath = path.join(os.tmpdir(), "Util_" + req.query.date + ".csv"); const minDate = new Date(req.query.date + " 00:00:00"); const maxDate = new Date(req.query.date + " 23:59:59"); const minTimestamp = admin.firestore.Timestamp.fromDate(minDate) const maxTimestamp = admin.firestore.Timestamp.fromDate(maxDate) const city = req.query.city; var output = fs.createWriteStream(tempFilePath); var archive = archiver('zip', { gzip: true, zlib: { level: 9 } }); const snapshot = await firestore.collection("utilisation").where("timestamp", ">", minTimestamp).where("timestamp", "<", maxTimestamp).get(); var utilText = "App Version,Author,Building,City,Country,Date,Floor ID,Floor Name, Floor Region,Bookable,Business Unit,Capacity,Fixed,Furniture Height,Occupancy,Position X,Position Y,Restricted,Status Date,Status Type,Unique Ref,Workspace Type,Report ID\n"; let utils = snapshot.docs; console.log("util info: " + utils); for (const doc of utils) { var line = doc.data(); var utilLine = ""; utilLine = utilLine + line.appVersion + ","; utilLine = utilLine + line.authorEmail + ","; utilLine = utilLine + line.building + ","; utilLine = utilLine + line.city + ","; utilLine = utilLine + line.country + ","; utilLine = utilLine + line.timestamp.toDate() + ","; utilLine = utilLine + line.floorId + ","; utilLine = utilLine + line.floorName + ","; utilLine = utilLine + line.region + ","; let spacesRef = firestore.collection('utilisation').doc(line.documentID).collection('workspaces'); const snap = await spacesRef.get(); let workspaces = snap.docs; for (const space of workspaces) { var lineSpace = space.data(); var spaceAdd = utilLine; spaceAdd = spaceAdd + lineSpace.bookable + ","; spaceAdd = spaceAdd + lineSpace.businessUnit + ","; spaceAdd = spaceAdd + lineSpace.capacity + ","; spaceAdd = spaceAdd + lineSpace.fixed + ","; spaceAdd = spaceAdd + lineSpace.height + ","; spaceAdd = spaceAdd + lineSpace.occupancy + ","; spaceAdd = spaceAdd + 
lineSpace.positionX + ","; spaceAdd = spaceAdd + lineSpace.positionY + ","; spaceAdd = spaceAdd + lineSpace.restricted + ","; spaceAdd = spaceAdd + lineSpace.statusDate.toDate() + ","; spaceAdd = spaceAdd + lineSpace.statusType + ","; spaceAdd = spaceAdd + lineSpace.uniqueRef + ","; spaceAdd = spaceAdd + lineSpace.workspaceType + ","; spaceAdd = spaceAdd + line.documentID + "\n"; utilText = utilText + spaceAdd; }; }; console.log("Complete string: " + utilText); try { // listen for all archive data to be written // 'close' event is fired only when a file descriptor is involved output.on('close', function () { console.log(archive.pointer() + ' total bytes'); console.log('archiver has been finalized and the output file descriptor has closed.'); console.log("Util Text: " + utilText); bucket.upload(tempFilePath, { make_public: true, gzip: true, destination: tempFilePath, metadata: metadata, }); //Let's get the signed URL const file = admin.storage().bucket().file(tempFilePath); console.log('File: ' + file); var expiryDate = new Date(); expiryDate.setDate(expiryDate.getDate() + 1); file.getSignedUrl({ action: 'read', expires: expiryDate }).then(urls => { const signedUrl = urls[0]; console.log('Send: ' + signedUrl); res.redirect(signedUrl); res.end(); }); }); output.on('end', function () { console.log('Data has been drained'); }); // good practice to catch warnings (ie stat failures and other non-blocking errors) archive.on('warning', function (err) { if (err.code === 'ENOENT') { // log warning } else { // throw error throw err; } }); // good practice to catch this error explicitly archive.on('error', function (err) { throw err; }); const metadata = { contentType: "application/zip", }; archive.pipe(output); // append a file from string archive.append(utilText, { name: tempFilePath }); archive.finalize(); console.log("tempFilePath: " + tempFilePath); console.log("Export Util CSV finished"); } catch (err) { console.log(err); } }
声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.