mirror of https://github.com/Litlyx/litlyx (synced 2025-12-10 07:48:37 +01:00)

Commit: rewrite
dashboard/server/api/data/events_data/flow_from_name.ts (new file, +92)
@@ -0,0 +1,92 @@
import { getUserProjectFromId } from "~/server/LIVE_DEMO_DATA";
import { EventModel } from "@schema/metrics/EventSchema";
import { VisitModel } from "@schema/metrics/VisitSchema";


export default defineEventHandler(async event => {

    const data = await getRequestData(event, { requireSchema: false });
    if (!data) return;

    const { project_id, from, to } = data;

    const { name: eventName } = getQuery(event);

    if (!eventName) return setResponseStatus(event, 400, 'name is required');

    // Fetch only the flowHash field of every matching event in the date range
    const allEvents = await EventModel.find({
        project_id: project_id,
        name: eventName,
        created_at: {
            $gte: new Date(from),
            $lte: new Date(to),
        }
    }, { flowHash: 1 });

    // Count how many events belong to each flow hash
    const allFlowHashes = new Map<string, number>();

    allEvents.forEach(e => {
        if (!e.flowHash) return;
        if (e.flowHash.length == 0) return;
        const count = allFlowHashes.get(e.flowHash) ?? 0;
        allFlowHashes.set(e.flowHash, count + 1);
    });

    const flowHashIds = Array.from(allFlowHashes.keys());

    const allReferrers: { referrer: string, flowHash: string }[] = [];

    // Look up visits in chunks of 10 flow hashes; note that splice drains
    // flowHashIds, so later passes must iterate allFlowHashes instead
    const promises: Promise<void>[] = [];
    while (flowHashIds.length > 0) {
        const flowHashIdsChunk = flowHashIds.splice(0, 10);
        promises.push((async () => {
            const visits = await VisitModel.find({ project_id, flowHash: { $in: flowHashIdsChunk } }, { referrer: 1, flowHash: 1 });
            allReferrers.push(...visits.map(e => ({ referrer: e.referrer, flowHash: e.flowHash })));
        })());
    }

    await Promise.all(promises);

    // Collect the unique referrers per flow hash (not used in the response yet).
    // Iterate allFlowHashes.keys() here: flowHashIds is empty after the splices above.
    const groupedFlows: Record<string, { referrers: string[] }> = {};

    for (const flowHash of allFlowHashes.keys()) {
        if (!groupedFlows[flowHash]) groupedFlows[flowHash] = { referrers: [] };
        const target = groupedFlows[flowHash];
        const referrers = allReferrers.filter(e => e.flowHash === flowHash).map(e => e.referrer);
        for (const referrer of referrers) {
            if (target.referrers.includes(referrer)) continue;
            target.referrers.push(referrer);
        }
    }

    // Count visits per referrer across all flows
    const grouped: Record<string, number> = {};

    for (const referrerPlusHash of allReferrers) {
        const referrer = referrerPlusHash.referrer;
        if (!grouped[referrer]) grouped[referrer] = 0;
        grouped[referrer]++;
    }

    const eventsCount = allEvents.length;

    const allGroupedValue = Object.keys(grouped)
        .map(key => grouped[key])
        .reduce((a, e) => a + e, 0);

    // Turn raw counts into percentages of the total
    for (const key in grouped) {
        grouped[key] = 100 / allGroupedValue * grouped[key];
    }

    return grouped;

});
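A minimal usage sketch for this endpoint, assuming the dashboard calls it with Nuxt's $fetch, that the route mirrors the file path under server/api, and that getRequestData reads project_id/from/to from a JSON body (the event name travels in the query string, as the handler shows):

// Hypothetical client-side call; the method and body shape are assumptions
const project_id = 'demo-project-id'; // hypothetical id
const referrerShares = await $fetch<Record<string, number>>(
    '/api/data/events_data/flow_from_name?name=signup_click',
    {
        method: 'POST',
        body: {
            project_id,
            from: '2025-01-01T00:00:00Z',
            to: '2025-01-31T23:59:59Z'
        }
    }
);
// e.g. { "google.com": 62.5, "direct": 37.5 }: each referrer's share,
// in percent, of all visits tied to the matching flow hashes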
@@ -0,0 +1,43 @@
import { getUserProjectFromId } from "~/server/LIVE_DEMO_DATA";
import { EventModel } from "@schema/metrics/EventSchema";
import { EVENT_METADATA_FIELDS_EXPIRE_TIME, Redis } from "~/server/services/CacheService";
import { PipelineStage } from "mongoose";


export default defineEventHandler(async event => {

    const data = await getRequestData(event, { requireSchema: false });
    if (!data) return;

    const { project_id } = data;

    const { name: eventName, field, from, to } = getQuery(event);

    if (!from) return setResponseStatus(event, 400, 'from is required');
    if (!to) return setResponseStatus(event, 400, 'to is required');
    if (!eventName) return setResponseStatus(event, 400, 'name is required');
    if (!field) return setResponseStatus(event, 400, 'field is required');

    // Group matching events by the requested metadata field and count
    // occurrences, most frequent value first
    const aggregation: PipelineStage[] = [
        {
            $match: {
                project_id, name: eventName,
                created_at: {
                    $gte: new Date(from.toString()),
                    $lte: new Date(to.toString()),
                }
            }
        },
        { $group: { _id: `$metadata.${field}`, count: { $sum: 1 } } },
        { $sort: { count: -1 } }
    ];

    const metadataGrouped = await EventModel.aggregate(aggregation);

    return metadataGrouped;

});
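For illustration, a sketch of the shape this aggregation returns: $group buckets events by the chosen metadata field's value and $sort orders the buckets by descending count (the values below are invented, not from real data):

// Illustrative result for field=plan, given events whose metadata.plan
// values were 'pro', 'pro', 'free':
const exampleResult: { _id: string, count: number }[] = [
    { _id: 'pro', count: 2 },
    { _id: 'free', count: 1 }
];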
dashboard/server/api/data/events_data/metadata_fields.ts (new file, +32)
@@ -0,0 +1,32 @@
import { EventModel } from "@schema/metrics/EventSchema";
import { Redis } from "~/server/services/CacheService";


export default defineEventHandler(async event => {

    const data = await getRequestData(event, { requireSchema: false });
    if (!data) return;

    const { project_id } = data;

    const { name: eventName } = getQuery(event);
    if (!eventName) return [];

    // Cache the field list for 60 seconds to avoid re-scanning on every request
    const fields: string[] = await Redis.useCache({
        key: `metadata_fields:${project_id}:${eventName}`,
        exp: 60
    }, async () => {
        // Sample the 10 most recent events with this name and collect the
        // union of their metadata keys
        const eventsWithName = await EventModel.find({ project_id, name: eventName }, { metadata: 1 }, { limit: 10, sort: { created_at: -1 } });
        const allMetadata = eventsWithName.map(e => e.metadata);
        const allFields = new Set<string>();
        for (const metadata of allMetadata) {
            const keys = Object.keys(metadata || {});
            keys.forEach(key => allFields.add(key));
        }
        return Array.from(allFields.values());
    });

    return fields;

});
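A hedged usage sketch: the event name goes in the query string (matching the getQuery call above), while project_id is assumed to travel in the body that getRequestData parses:

// Hypothetical call; the method and body shape are assumptions
const project_id = 'demo-project-id'; // hypothetical id
const fields = await $fetch<string[]>(
    '/api/data/events_data/metadata_fields?name=purchase',
    { method: 'POST', body: { project_id } }
);
// e.g. ['price', 'currency', 'sku']: the union of metadata keys found on
// the 10 most recent 'purchase' events, cached for 60 seconds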
dashboard/server/api/data/events_data/names.ts (new file, +26)
@@ -0,0 +1,26 @@
import { EventModel } from "@schema/metrics/EventSchema";
import { Redis } from "~/server/services/CacheService";


export default defineEventHandler(async event => {

    const data = await getRequestData(event, { requireSchema: false });
    if (!data) return;

    const { project_id } = data;

    // Cache the project's distinct event names for 60 seconds
    const names: string[] = await Redis.useCache({
        key: `event_names:${project_id}`,
        exp: 60
    }, async () => {
        const namesAggregation = await EventModel.aggregate([
            { $match: { project_id } },
            { $group: { _id: "$name" } }
        ]);
        return namesAggregation.map(e => e._id);
    });

    return names;

});
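And a matching sketch for the names endpoint, under the same transport assumptions:

// Hypothetical call; the body shape is an assumption
const project_id = 'demo-project-id'; // hypothetical id
const names = await $fetch<string[]>(
    '/api/data/events_data/names',
    { method: 'POST', body: { project_id } }
);
// distinct event names for the project, served from Redis for up to 60 seconds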
dashboard/server/api/data/live_users.ts (new file, +25)
@@ -0,0 +1,25 @@
import { SessionModel } from "@schema/metrics/SessionSchema";


export default defineEventHandler(async event => {

    const data = await getRequestData(event, { requireSchema: false });
    if (!data) return;

    const { project_id } = data;

    // A user counts as "live" if their session was updated in the last 5 minutes
    const online_users = await SessionModel.aggregate([
        {
            $match: {
                project_id,
                updated_at: { $gt: new Date(Date.now() - 1000 * 60 * 5) }
            }
        },
        { $count: 'count' }
    ]);

    if (!online_users[0]) return 0;

    return online_users[0].count;

});
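A usage sketch for the live counter; the 5-minute window is enforced server-side, so a client only needs to poll periodically (transport assumptions as above):

// Hypothetical polling call; the body shape is an assumption
const project_id = 'demo-project-id'; // hypothetical id
const liveUsers = await $fetch<number>(
    '/api/data/live_users',
    { method: 'POST', body: { project_id } }
);
// number of sessions updated in the last 5 minutes, or 0 if none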