
How can I speed up Firebase functions in an Express app which are taking too long to load (more than 8 seconds most of the time)?

We have an application that uses a Node.js and Express backend powered by Firebase, but one problem we are facing is very long load times, even for simple queries. We refactored our endpoints into different files so that they don't all run at once, and we also have cron jobs which I expected would help with cold starts; however, I doubt cold starts are what we are experiencing, because all requests, including subsequent ones, are slow. They usually take more than 8 seconds, which is poor performance for users.
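
The cron jobs I mention are keep-warm scheduled functions along these lines (a sketch only; the schedule and target URL below are placeholders, not our exact setup):

const functions = require("firebase-functions");
const https = require("https");

// Hypothetical keep-warm job: the schedule and URL are placeholders.
exports.keep_warm = functions.pubsub
  .schedule("every 5 minutes")
  .onRun(() => {
    return new Promise((resolve, reject) => {
      https
        .get("https://us-central1-<project-id>.cloudfunctions.net/apis/", (res) => {
          res.resume(); // discard the body; we only want to keep an instance warm
          resolve(null);
        })
        .on("error", reject);
    });
  });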

In our package.json, this is what we have so you can see the Firebase versions we are using:

{
  "name": "functions",
  "description": "Cloud Functions for Firebase",
  "scripts": {
    "serve": "firebase serve --only functions",
    "shell": "firebase functions:shell",
    "start": "npm run shell",
    "deploy": "firebase deploy --only functions",
    "logs": "firebase functions:log"
  },
  "engines": {
    "node": "14"
  },
  "dependencies": {
    "@google-cloud/storage": "^5.8.2",
    "@sendgrid/mail": "^7.2.1",
    "algoliasearch": "^4.3.0",
    "bcrypt": "^5.1.0",
    "busboy": "^0.3.1",
    "cookie-parser": "^1.4.5",
    "cors": "^2.8.5",
    "dayjs": "^1.10.4",
    "dotenv": "^8.2.0",
    "easy-soap-request": "^4.1.3",
    "express": "^4.17.1",
    "firebase": "^7.15.5",
    "firebase-admin": "^8.6.0",
    "firebase-functions": "^3.23.0",
    "fs-extra": "^9.0.1",
    "jwt-decode": "^2.2.0",
    "moment": "^2.29.1",
    "request": "^2.88.2",
    "sharp": "^0.25.4",
    "sib-api-v3-sdk": "^8.4.2",
    "uuid": "^8.2.0",
    "xml-js": "^1.6.11"
  },
  "devDependencies": {
    "firebase-functions-test": "^0.1.6"
  },
  "private": true
}

Below is the index.js file showing how we set everything up.

require("dotenv").config();
const functions = require("firebase-functions");
const express = require("express");
const app = express();

const cookieParser = require("cookie-parser");
const cors = require("cors");

app.use(cors());
app.use(cookieParser());
app.use(express.json());

const dashboardRoutes = require("./routes/dashboardRoutes");
const userRoutes = require("./routes/userRoutes");
const pagesRoutes = require("./routes/pagesRoutes");
const orderRoutes = require("./routes/orderRoutes");
const cartRoutes = require("./routes/cartRoutes");
const wishlistRoutes = require("./routes/wishlistRoutes");
const authRoutes = require("./routes/authRoutes");
const storeRoutes = require("./routes/storeRoutes");
const createSellerRoutes = require("./routes/createSellerRoutes");

app.use("/", pagesRoutes);
app.use("/user", userRoutes);
app.use("/dash", dashboardRoutes);
app.use("/order", orderRoutes);
app.use("/cart", cartRoutes);
app.use("/wishlist", wishlistRoutes);
app.use("/auth", authRoutes);
app.use("/s", storeRoutes);
app.use("/cr", createSellerRoutes);

const {
  cron_job1,
  cron_job2,
  cron_job3,
  cron_job4,
} = require("./triggers/search_triggers_and_cron_jobs"); <-- not the name of the actual file

const { /* other cron jobs */ } = require("./cron-jobs");
const {
  update_exchange_currency_rates,
} = require("./cron-jobs/currency_exchange_rates");
const { reset_product_visits } = require("./triggers/products");
const { Home } = require("./handlers/pages");
const { db } = require("./util/admin");
const { product_basic_obj } = require("./util/product_basic_obj");

exports.apis = functions.https.onRequest(app);

// the endpoint below is a sample of the kind of work we perform in our endpoints, and we see slow execution times even for a function defined directly in this file

app.get("/test-home", (req, res) => {
  let content = {};
  db.collection("products")
    .where("status", "==", "active")
    .orderBy("visited", "desc")
    .limit(20)
    .get()
    .then((data) => {
      content.popular_today = [];
      data.forEach((x) => {
        content.popular_today.push(product_basic_obj(x.data()));
      });
      return db
        .collection("products")
        .where("status", "==", "active")
        .orderBy("todaysSales", "desc")
        .limit(20)
        .get();
    })
    .then((data) => {
      content.hot_today = [];
      data.forEach((x) => {
        content.hot_today.push(product_basic_obj(x.data()));
      });

      return db.collection("departments").get();
    })
    .then((data) => {
      content.departments = [];
      data.forEach((x) => {
        content.departments.push(x.data());
      });

      return db
        .collection("departments")
        .orderBy("products_sold_today", "desc")
        .limit(6)
        .get();
    })
    .then((data) => {
      content.top_departments = [];
      data.forEach((x) => {
        content.top_departments.push(x.data());
      });
      return res.status(200).json(content);
    });
});

//cron jobs
exports.cron_job1 = cron_job1;
exports.cron_job2 = cron_job2;
exports.cron_job3 = cron_job3;
exports.cron_job4 = cron_job4;

Upon executing an endpoint, this is what shows up in the console, and in a deployed environment we experience the same slow execution times, which seem to be the average for all our executions:

i  functions: Beginning execution of "us-central1-apis"
⚠  Google API requested!
   - URL: "https://www.googleapis.com/oauth2/v4/token"
   - Be careful, this may be a production service.
i  functions: Finished "us-central1-apis" in ~9s
i  functions: Finished "us-central1-apis" in ~8s
i  functions: Beginning execution of "us-central1-apis"
i  functions: Finished "us-central1-apis" in ~7s
i  functions: Beginning execution of "us-central1-apis"
i  functions: Finished "us-central1-apis" in ~7s
i  functions: Beginning execution of "us-central1-apis"
i  functions: Finished "us-central1-apis" in ~7s
i  functions: Beginning execution of "us-central1-apis"
i  functions: Finished "us-central1-apis" in ~6s
i  functions: Beginning execution of "us-central1-apis"
i  functions: Finished "us-central1-apis" in ~7s
i  functions: Beginning execution of "us-central1-apis"
i  functions: Finished "us-central1-apis" in ~7s

How can I speed up the execution using the information above?

We tried breaking the code into smaller files, which didn't give us the faster executions we expected, and we also removed most of our third-party libraries, but that made no difference. What could we do to bring execution times down further?

Your data loading strategy is sequential: Load 1, then Load 2, then Load 3. When none of the later loads depends on the result of a previous load, that approach is not really effective or useful.

Instead, you can use Promise.all() to fire all of those promises "in parallel".

Next issue: you are loading departments from Firebase twice, once for departments and again for top_departments. There is no need to load top_departments separately, because all the data you need is already in departments; you only have to .sort and .slice it (or a shallow copy, [...departments]).

I'd try this approach:

// popular_today
function getPopularProductsByVisitedAsync() {
  return db
    .collection("products")
    .where("status", "==", "active")
    .orderBy("visited", "desc")
    .limit(20)
    .get()
    .then((data) => {
      return data.docs.map((x) => product_basic_obj(x.data()));
    });
}

// hot_today
function getPopularProductsByTodaySalesAsync() {
  return db
    .collection("products")
    .where("status", "==", "active")
    .orderBy("todaysSales", "desc")
    .limit(20)
    .get()
    .then((data) => {
      return data.docs.map((x) => product_basic_obj(x.data()));
    });
}

function getAllDepartmentsAsync() {
  return db
    .collection("departments")
    .get()
    .then((data) => data.docs.map((x) => x.data()));
}

app.get("/test-home", async (req, res) => {
  const [popular_today, hot_today, departments] = await Promise.all([
    getPopularProductsByVisitedAsync(),
    getPopularProductsByTodaySalesAsync(),
    getAllDepartmentsAsync()
  ]);
  // Sort descending to match the original orderBy("products_sold_today", "desc").
  const top_departments = [...departments]
    .sort((a, b) => b.products_sold_today - a.products_sold_today)
    .slice(0, 6);

  const content = {
    popular_today: popular_today,
    hot_today: hot_today,
    departments: departments,
    top_departments: top_departments
  };
  return res.status(200).json(content);
});
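
One thing the sketch above leaves out is error handling: if any of the three queries rejects, the awaited Promise.all() throws and Express never sends a response. A variant of the same handler, using the helper functions defined above, could wrap the parallel load in try/catch (again just a sketch, not tested):

app.get("/test-home", async (req, res) => {
  try {
    // Fire all three Firestore queries in parallel and wait for all of them.
    const [popular_today, hot_today, departments] = await Promise.all([
      getPopularProductsByVisitedAsync(),
      getPopularProductsByTodaySalesAsync(),
      getAllDepartmentsAsync(),
    ]);

    // Derive top_departments locally instead of issuing a fourth query.
    const top_departments = [...departments]
      .sort((a, b) => b.products_sold_today - a.products_sold_today)
      .slice(0, 6);

    return res
      .status(200)
      .json({ popular_today, hot_today, departments, top_departments });
  } catch (err) {
    console.error(err);
    return res.status(500).json({ error: "Failed to load home content" });
  }
});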

Try to execute your requests in parallel in index.js. This optimization will provide some gains in the network request timings.
