change in progress

This commit is contained in:
Emily
2024-10-02 17:05:34 +02:00
parent f516c53b7b
commit 314660d8a3
22 changed files with 503 additions and 438 deletions

View File

@@ -0,0 +1,34 @@
import { VisitModel } from "@schema/metrics/VisitSchema";
import { Redis } from "~/server/services/CacheService";
import { getRequestData } from "~/server/utils/getRequestData";
/**
 * Returns the top browsers (by visit count) for a project within a time window.
 * Inputs are resolved from the request by getRequestData: pid, from, to,
 * project_id and limit. Result is cached in Redis for 60 seconds.
 */
export default defineEventHandler(async event => {
    const data = await getRequestData(event, { requireSchema: false });
    if (!data) return;
    const { pid, from, to, project_id, limit } = data;
    // BUG FIX: `limit` shapes the result (via $limit) but was missing from the
    // cache key, so requests with different limits served each other's cached data.
    const cacheKey = `browsers:${pid}:${from}:${to}:${limit}`;
    const cacheExp = 60; // seconds
    return await Redis.useCacheV2(cacheKey, cacheExp, async () => {
        const result = await VisitModel.aggregate([
            {
                $match: {
                    project_id,
                    created_at: { $gte: new Date(from), $lte: new Date(to) }
                }
            },
            // Count visits per browser, most frequent first.
            { $group: { _id: "$browser", count: { $sum: 1 } } },
            { $sort: { count: -1 } },
            { $limit: limit }
        ]);
        return result as { _id: string, count: number }[];
    });
});

View File

@@ -1,57 +1,28 @@
import { EventModel } from "@schema/metrics/EventSchema";
import { getUserProjectFromId } from "~/server/LIVE_DEMO_DATA";
import { Redis } from "~/server/services/CacheService";
import type { Model } from "mongoose";
// Whitelist of collections that may be queried through this endpoint.
// `model` is the Mongoose model to aggregate over; `field` names the document
// property the caller groups on (e.g. 'name' for events). Requests naming a
// schema outside this map are rejected before any query runs.
const allowedModels: Record<string, { model: Model<any>, field: string }> = {
'events': {
model: EventModel,
field: 'name'
}
}
// The set of valid schema identifiers accepted from the request.
type TModelName = keyof typeof allowedModels;
import { getRequestData } from "~/server/utils/getRequestData";
export default defineEventHandler(async event => {
const project_id = getHeader(event, 'x-pid');
if (!project_id) return;
const user = getRequestUser(event);
const project = await getUserProjectFromId(project_id, user);
if (!project) return;
const from = getRequestHeader(event, 'x-from');
const to = getRequestHeader(event, 'x-to');
const data = await getRequestData(event);
if (!data) return;
if (!from || !to) return setResponseStatus(event, 400, 'x-from and x-to are required');
const { schemaName, pid, from, to, model, project_id } = data;
const schemaName = getRequestHeader(event, 'x-schema');
if (!schemaName) return setResponseStatus(event, 400, 'x-schema is required');
if (!Object.keys(allowedModels).includes(schemaName)) return setResponseStatus(event, 400, 'x-schema value is not valid');
const cacheKey = `count:${schemaName}:${project_id}:${from}:${to}`;
const cacheKey = `count:${schemaName}:${pid}:${from}:${to}`;
const cacheExp = 60;
return await Redis.useCacheV2(cacheKey, cacheExp, async (noStore, updateExp) => {
const { model } = allowedModels[schemaName as TModelName];
return await Redis.useCacheV2(cacheKey, cacheExp, async () => {
const result = await model.aggregate([
{
$match: {
project_id: project._id,
created_at: {
$gte: new Date(from),
$lte: new Date(to)
}
project_id,
created_at: { $gte: new Date(from), $lte: new Date(to) }
}
},
{
$count: 'total'
}
{ $count: 'count' },
]);
return result;

View File

@@ -1,65 +0,0 @@
import { EventModel } from "@schema/metrics/EventSchema";
import { getUserProjectFromId } from "~/server/LIVE_DEMO_DATA";
import { Redis } from "~/server/services/CacheService";
import type { Model } from "mongoose";
// Whitelist of collections that may be queried through this endpoint.
// `model` is the Mongoose model to aggregate over; `field` names the document
// property the caller groups on (e.g. 'name' for events). Requests naming a
// schema outside this map are rejected before any query runs.
const allowedModels: Record<string, { model: Model<any>, field: string }> = {
'events': {
model: EventModel,
field: 'name'
}
}
// The set of valid schema identifiers accepted via the x-schema header.
type TModelName = keyof typeof allowedModels;
/**
 * Returns the top grouped counts (by document name) for a whitelisted schema,
 * scoped to the caller's project and a time window. Every query input arrives
 * as a request header; results are cached in Redis for 60 seconds.
 */
export default defineEventHandler(async event => {
    const project_id = getHeader(event, 'x-pid');
    if (!project_id) return;

    // Resolve and authorize the project for the requesting user.
    const user = getRequestUser(event);
    const project = await getUserProjectFromId(project_id, user);
    if (!project) return;

    const from = getRequestHeader(event, 'x-from');
    const to = getRequestHeader(event, 'x-to');
    if (!from || !to) return setResponseStatus(event, 400, 'x-from and x-to are required');

    const schemaName = getRequestHeader(event, 'x-schema');
    if (!schemaName) return setResponseStatus(event, 400, 'x-schema is required');
    if (!Object.keys(allowedModels).includes(schemaName)) return setResponseStatus(event, 400, 'x-schema value is not valid');

    // Result-size cap; defaults to 10 when the header is absent or malformed.
    const rawLimit = getRequestHeader(event, 'x-query-limit');
    const parsedLimit = parseInt(rawLimit || '10');
    const limit = Number.isNaN(parsedLimit) ? 10 : parsedLimit;

    const cacheKey = `${schemaName}:${project_id}:${from}:${to}`;
    const cacheExp = 60;

    return await Redis.useCacheV2(cacheKey, cacheExp, async (noStore, updateExp) => {
        const { model } = allowedModels[schemaName as TModelName];
        const matchStage = {
            $match: {
                project_id: project._id,
                created_at: { $gte: new Date(from), $lte: new Date(to) }
            }
        };
        // Group by name, most frequent first, capped at `limit` rows.
        return await model.aggregate([
            matchStage,
            { $group: { _id: "$name", count: { $sum: 1 } } },
            { $sort: { count: -1 } },
            { $limit: limit }
        ]);
    });
});

View File

@@ -7,21 +7,14 @@ import mongoose from "mongoose";
export default defineEventHandler(async event => {
const project_id = getHeader(event, 'x-pid');
if (!project_id) return;
const user = getRequestUser(event);
const project = await getUserProjectFromId(project_id, user);
if (!project) return;
const data = await getRequestData(event, { requireSchema: false, requireSlice: true });
if (!data) return;
const from = getRequestHeader(event, 'x-from');
const to = getRequestHeader(event, 'x-to');
const { pid, from, to, slice, project_id } = data;
if (!from || !to) return setResponseStatus(event, 400, 'x-from and x-to are required');
const slice = getRequestHeader(event, 'x-slice');
const cacheKey = `bouncing_rate:${project_id}:${from}:${to}`;
const cacheKey = `timeline:bouncing_rate:${pid}:${from}:${to}`;
const cacheExp = 60 * 60; //1 hour
return await Redis.useCacheV2(cacheKey, cacheExp, async (noStore, updateExp) => {
@@ -37,14 +30,14 @@ export default defineEventHandler(async event => {
const allDates = DateService.createBetweenDates(from, to, slice as any);
const result: { date: string, value: number }[] = [];
const result: { _id: string, count: number }[] = [];
for (const date of allDates.dates) {
const visits = await VisitModel.aggregate([
{
$match: {
project_id: project._id,
project_id: project_id,
created_at: {
$gte: date.startOf(slice as any).toDate(),
$lte: date.endOf(slice as any).toDate()
@@ -57,22 +50,30 @@ export default defineEventHandler(async event => {
const sessions = await SessionModel.aggregate([
{
$match: {
project_id: project._id,
project_id: project_id,
created_at: {
$gte: date.startOf(slice as any).toDate(),
$lte: date.endOf(slice as any).toDate()
}
},
},
{ $group: { _id: "$session", count: { $sum: 1, }, duration: { $sum: '$duration' } } },
{
$group: {
_id: "$session", count: { $sum: 1, },
duration: { $sum: '$duration' }
}
},
]);
const total = visits.length;
const bounced = sessions.filter(e => (e.duration / e.count) < 1).length;
const bouncing_rate = 100 / total * bounced;
result.push({ date: date.toISOString(), value: bouncing_rate });
result.push({
_id: date.toISOString(),
count: bouncing_rate
});
}
return result;
});

View File

@@ -0,0 +1,29 @@
import { SessionModel } from "@schema/metrics/SessionSchema";
import { Redis } from "~/server/services/CacheService";
import { executeTimelineAggregation, fillAndMergeTimelineAggregationV2 } from "~/server/services/TimelineService";
/**
 * Sessions timeline for a project: bucketed counts between `from` and `to`,
 * where `slice` selects the bucket granularity. Cached in Redis for 60 seconds.
 */
export default defineEventHandler(async event => {
    const data = await getRequestData(event, { requireSchema: false, requireSlice: true });
    if (!data) return;
    const { pid, from, to, slice, project_id } = data;
    // BUG FIX: `slice` changes the aggregation granularity but was missing from
    // the cache key, so e.g. hourly and daily timelines for the same range
    // collided on one cache entry.
    const cacheKey = `timeline:sessions:${pid}:${from}:${to}:${slice}`;
    const cacheExp = 60; // seconds
    return await Redis.useCacheV2(cacheKey, cacheExp, async () => {
        const timelineData = await executeTimelineAggregation({
            projectId: project_id,
            model: SessionModel,
            from, to, slice,
        });
        // Fill gaps so every bucket in [from, to] is present, then merge.
        const timelineFilledMerged = fillAndMergeTimelineAggregationV2(timelineData, slice, from, to);
        return timelineFilledMerged;
    });
});

View File

@@ -0,0 +1,35 @@
import { SessionModel } from "@schema/metrics/SessionSchema";
import { Redis } from "~/server/services/CacheService";
import { executeAdvancedTimelineAggregation, fillAndMergeTimelineAggregationV2 } from "~/server/services/TimelineService";
/**
 * Average session duration timeline: per bucket, sums `duration` and divides by
 * the bucket's count (duration / count) to report a mean. `slice` selects the
 * bucket granularity. Cached in Redis for 60 seconds.
 */
export default defineEventHandler(async event => {
    const data = await getRequestData(event, { requireSchema: false, requireSlice: true });
    if (!data) return;
    const { pid, from, to, slice, project_id } = data;
    // BUG FIX: `slice` changes the aggregation granularity but was missing from
    // the cache key, so different slices for the same range shared one entry.
    const cacheKey = `timeline:sessions_duration:${pid}:${from}:${to}:${slice}`;
    const cacheExp = 60; // seconds
    return await Redis.useCacheV2(cacheKey, cacheExp, async () => {
        const timelineData = await executeAdvancedTimelineAggregation({
            projectId: project_id,
            model: SessionModel,
            from, to, slice,
            customGroup: {
                duration: { $sum: '$duration' }
            },
            // Expose the mean duration through the standard `count` field so the
            // shared fill/merge helper can process it like any other timeline.
            customProjection: {
                count: { $divide: ["$duration", "$count"] }
            },
        });
        const timelineFilledMerged = fillAndMergeTimelineAggregationV2(timelineData, slice, from, to);
        return timelineFilledMerged;
    });
});

View File

@@ -0,0 +1,29 @@
import { VisitModel } from "@schema/metrics/VisitSchema";
import { Redis } from "~/server/services/CacheService";
import { executeTimelineAggregation, fillAndMergeTimelineAggregationV2 } from "~/server/services/TimelineService";
/**
 * Visits timeline for a project: bucketed counts between `from` and `to`,
 * where `slice` selects the bucket granularity. Cached in Redis for 60 seconds.
 */
export default defineEventHandler(async event => {
    const data = await getRequestData(event, { requireSchema: false, requireSlice: true });
    if (!data) return;
    const { pid, from, to, slice, project_id } = data;
    // BUG FIX: `slice` changes the aggregation granularity but was missing from
    // the cache key, so e.g. hourly and daily timelines for the same range
    // collided on one cache entry.
    const cacheKey = `timeline:visits:${pid}:${from}:${to}:${slice}`;
    const cacheExp = 60; // seconds
    return await Redis.useCacheV2(cacheKey, cacheExp, async () => {
        const timelineData = await executeTimelineAggregation({
            projectId: project_id,
            model: VisitModel,
            from, to, slice,
        });
        // Fill gaps so every bucket in [from, to] is present, then merge.
        const timelineFilledMerged = fillAndMergeTimelineAggregationV2(timelineData, slice, from, to);
        return timelineFilledMerged;
    });
});