fix: Improve ticket query in backend to resolve high delay issue
parent dc5a6945d2
commit c27770ef02
@@ -263,8 +263,6 @@ export const reportMessagesUserByDateStartDateEnd = async (
     }
 
     data_query_messages[i].id = i + 1;
-
-    console.log("data_query_messages: ", data_query_messages[i]);
   }
 
   return res.status(200).json(data_query_messages);
@@ -18,6 +18,7 @@ import { startOfDay, endOfDay, parseISO, getDate } from "date-fns";
 import { string } from "yup/lib/locale";
 import Whatsapp from "../../models/Whatsapp";
 import Query from "mysql2/typings/mysql/lib/protocol/sequences/Query";
 import { te } from "date-fns/locale";
 
 interface Request {
   userId: string | number;
@@ -43,43 +44,56 @@ const ShowTicketReport = async ({
   createdOrUpdated = "created",
   queueId
 }: Request): Promise<Response> => {
-  let where_clause: any = {};
-  let query = "";
+  // let where_clause: any = {};
+  // let query = "";
 
-  if (userId !== "0") {
-    where_clause.userid = userId;
-    query = `AND t.userId = ${userId}`;
-  }
+  // if (userId !== "0") {
+  //   where_clause.userid = userId;
+  //   query = `AND t.userId = ${userId}`;
+  // }
 
+  // if (queueId) {
+  //   where_clause.queueId = queueId;
+  //   query = `AND t.queueId = ${queueId}`;
+  // }
+
+  const createdAtOrUpdatedAt =
+    createdOrUpdated == "created" ? "createdAt" : "updatedAt";
+
+  let where_clause = {};
+
   if (queueId) {
-    where_clause.queueId = queueId;
-    query = `AND t.queueId = ${queueId}`;
+    where_clause = {
+      queueId: queueId,
+      [createdAtOrUpdatedAt]: {
+        [Op.gte]: startDate + " 00:00:00.000000",
+        [Op.lte]: endDate + " 23:59:59.999999"
+      }
+    };
+  } else if (userId == "0") {
+    where_clause = {
+      [createdAtOrUpdatedAt]: {
+        [Op.gte]: startDate + " 00:00:00.000000",
+        [Op.lte]: endDate + " 23:59:59.999999"
+      }
+    };
+  } else if (userId != "0") {
+    where_clause = {
+      userid: userId,
+      [createdAtOrUpdatedAt]: {
+        [Op.gte]: startDate + " 00:00:00.000000",
+        [Op.lte]: endDate + " 23:59:59.999999"
+      }
+    };
   }
 
   const limit = 40;
   const offset = limit * (+pageNumber - 1);
 
-  const createdAtOrUpdatedAt =
-    createdOrUpdated == "created" ? "createdAt" : "updatedAt";
-
-  const _ticketsId = await sequelize.query(
-    `SELECT t.id
-     FROM Tickets t
-     INNER JOIN (
-       SELECT DISTINCT ticketId
-       FROM Messages
-       WHERE ${createdAtOrUpdatedAt} >= '${startDate} 00:00:00' AND ${createdAtOrUpdatedAt} <= '${endDate} 23:59:59'
-     ) AS m ON m.ticketId = t.id ${query};`,
-    { type: QueryTypes.SELECT }
-  );
-
   let { count, rows: tickets }: any = await Ticket.findAndCountAll({
-    where: {
-      id: { [Op.in]: _ticketsId.map((t: any) => t.id) }
-    },
+    where: where_clause,
    limit,
    offset,
 
    attributes: [
      "id",
      "status",
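The three where_clause branches above build the same date-range filter and differ only in the optional queueId/userid key. As a sketch only, not part of this commit, and assuming the same Op import from sequelize and the same inputs the service already receives, the filter could be composed once:

import { Op } from "sequelize";

// Sketch, not the committed code: one date-range object plus whichever optional
// key applies; queueId wins over userId, mirroring the if / else-if order above.
const buildWhereClause = (
  column: "createdAt" | "updatedAt",
  startDate: string,
  endDate: string,
  userId: string | number,
  queueId?: string | number
) => ({
  [column]: {
    [Op.gte]: `${startDate} 00:00:00.000000`,
    [Op.lte]: `${endDate} 23:59:59.999999`
  },
  ...(queueId ? { queueId } : String(userId) !== "0" ? { userid: userId } : {})
});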
@@ -151,9 +165,7 @@ const ShowTicketReport = async ({
     throw new AppError("ERR_NO_TICKET_FOUND", 404);
   }
 
-  const ticketIds = tickets.map((t: any) => t.id);
-
-  if (ticketIds.length > 0) {
+  if (tickets.length > 0) {
     const waiting_time: any = await sequelize.query(
       `SELECT t.id as ticketId, t.status, TIME_FORMAT(
         SEC_TO_TIME(
@@ -182,7 +194,7 @@ const ShowTicketReport = async ({
       JOIN Whatsapps w ON t.whatsappId = w.id
       JOIN Queues q ON q.id = t.queueId
       WHERE DATE(m.createdAt) BETWEEN '${startDate} 00:00:00.000000' AND '${endDate} 23:59:59.999999'
-      AND t.id IN (${ticketIds.join()})
+      AND t.id IN (${tickets.map((t: any) => t.id).join()})
       AND m.createdAt = (SELECT MIN(createdAt) FROM Messages WHERE ticketId = t.id)
       AND m.fromMe = 0
       AND t.status IN ('open', 'closed')
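The waiting-time query above interpolates the date window and the ticket ids directly into the SQL string. As an illustration only, not part of this commit, the same values could be passed through Sequelize's replacements option, which binds named parameters and expands an array used inside IN (...):

// Illustrative sketch with a reduced column list; assumes the same sequelize
// instance, QueryTypes import and tickets array used above.
const waitingTime: any = await sequelize.query(
  `SELECT t.id AS ticketId, t.status
     FROM Tickets t
     JOIN Messages m ON m.ticketId = t.id
    WHERE t.id IN (:ticketIds)
      AND m.createdAt BETWEEN :startDate AND :endDate
      AND m.fromMe = 0`,
  {
    type: QueryTypes.SELECT,
    replacements: {
      ticketIds: tickets.map((t: any) => t.id),
      startDate: `${startDate} 00:00:00`,
      endDate: `${endDate} 23:59:59`
    }
  }
);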