mirror of
https://github.com/Litlyx/litlyx
synced 2025-12-10 07:48:37 +01:00
add security
This commit is contained in:
24
security/src/Aggregations.ts
Normal file
24
security/src/Aggregations.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import DateService, { Slice } from '@services/DateService';
|
||||
import { Model, Types } from "mongoose";
|
||||
|
||||
|
||||
export async function getAggregation(model: Model<any>, pid: Types.ObjectId, from: number, to: number, slice: Slice) {
|
||||
|
||||
const { group, sort, fromParts } = DateService.getQueryDateRange(slice);
|
||||
|
||||
const result = model.aggregate([
|
||||
{
|
||||
$match: {
|
||||
project_id: pid,
|
||||
created_at: { $gte: new Date(from), $lte: new Date(to) },
|
||||
}
|
||||
},
|
||||
{ $group: { _id: group, count: { $sum: 1 } } },
|
||||
{ $sort: sort },
|
||||
{ $project: { _id: { $dateFromParts: fromParts }, count: "$count" } }
|
||||
]);
|
||||
|
||||
return result;
|
||||
|
||||
}
|
||||
|
||||
156
security/src/AnomalyService.ts
Normal file
156
security/src/AnomalyService.ts
Normal file
@@ -0,0 +1,156 @@
|
||||
import mongoose from "mongoose";
|
||||
import { AnomalyDomainModel } from '@schema/anomalies/AnomalyDomainSchema';
|
||||
import { AnomalyVisitModel } from '@schema/anomalies/AnomalyVisitSchema';
|
||||
import { AnomalyEventsModel } from '@schema/anomalies/AnomalyEventsSchema';
|
||||
import { EventModel } from "@schema/metrics/EventSchema";
|
||||
import { VisitModel } from '@schema/metrics/VisitSchema'
|
||||
import EmailService from "@services/EmailService";
|
||||
import * as url from 'url';
|
||||
import { ProjectModel } from "@schema/ProjectSchema";
|
||||
import { UserModel } from "@schema/UserSchema";
|
||||
import { getAggregation } from "./Aggregations";
|
||||
|
||||
type TAvgInput = { _id: string, count: number }
|
||||
|
||||
export type AnomalyReport = {
|
||||
visits: TAvgInput[],
|
||||
events: TAvgInput[],
|
||||
dns: string[],
|
||||
pid: string
|
||||
}
|
||||
|
||||
export type AnomalyCallback = (report: AnomalyReport) => any;
|
||||
|
||||
const anomalyData = { minutes: 0 }
|
||||
|
||||
export async function anomalyCheckAll(callback: AnomalyCallback) {
|
||||
const start = performance.now();
|
||||
console.log('[ANOMALY] START ANOMALY CHECK');
|
||||
const projects = await ProjectModel.find({}, { _id: 1 });
|
||||
let i = 0;
|
||||
for (const project of projects) {
|
||||
console.log('Project:', i++, '/', projects.length);
|
||||
await findAnomalies(project.id, callback);
|
||||
}
|
||||
const end = performance.now() - start;
|
||||
console.log('END ANOMALY CHECK', end, 'ms');
|
||||
}
|
||||
|
||||
export function anomalyLoop(callback: AnomalyCallback) {
|
||||
if (anomalyData.minutes == 60 * 12) {
|
||||
anomalyCheckAll(callback);
|
||||
anomalyData.minutes = 0;
|
||||
}
|
||||
anomalyData.minutes++;
|
||||
setTimeout(() => anomalyLoop(callback), 1000 * 60);
|
||||
}
|
||||
|
||||
|
||||
function movingAverageAnomaly(visits: TAvgInput[], windowSize: number, threshold: number): TAvgInput[] {
|
||||
const anomalies: TAvgInput[] = [];
|
||||
for (let i = windowSize; i < visits.length; i++) {
|
||||
const window = visits.slice(i - windowSize, i);
|
||||
const mean = window.reduce((a, b) => a + b.count, 0) / window.length;
|
||||
const stdDev = Math.sqrt(window.reduce((sum, visit) => sum + Math.pow(visit.count - mean, 2), 0) / window.length);
|
||||
const currentVisit = visits[i];
|
||||
if (Math.abs(currentVisit.count - mean) > threshold * stdDev) {
|
||||
if (currentVisit.count <= mean) continue;
|
||||
anomalies.push(currentVisit);
|
||||
}
|
||||
}
|
||||
return anomalies;
|
||||
}
|
||||
|
||||
function getUrlFromString(str: string) {
|
||||
const res = str.startsWith('http') ? str : 'http://' + str;
|
||||
return res;
|
||||
}
|
||||
|
||||
export async function findAnomalies(project_id: string, callback: AnomalyCallback) {
|
||||
|
||||
const THRESHOLD = 6;
|
||||
const WINDOW_SIZE = 14;
|
||||
|
||||
const pid = new mongoose.Types.ObjectId(project_id) as any;
|
||||
|
||||
const from = Date.now() - 1000 * 60 * 60 * 24 * 30;
|
||||
const to = Date.now() - 1000 * 60 * 60 * 24;
|
||||
|
||||
const visitsTimelineData = await getAggregation(VisitModel, pid, from, to, 'day');
|
||||
|
||||
const eventsTimelineData = await getAggregation(EventModel, pid, from, to, 'day');
|
||||
|
||||
|
||||
const websites: { _id: string, count: number }[] = await VisitModel.aggregate([
|
||||
{ $match: { project_id: pid, created_at: { $gte: new Date(from), $lte: new Date(to) } }, },
|
||||
{ $group: { _id: "$website", count: { $sum: 1, } } }
|
||||
]);
|
||||
|
||||
|
||||
const detectedWebsites: string[] = [];
|
||||
|
||||
if (websites.length > 0) {
|
||||
const rootWebsite = websites.reduce((a, e) => {
|
||||
return a.count > e.count ? a : e;
|
||||
});
|
||||
const rootDomain = new url.URL(getUrlFromString(rootWebsite._id)).hostname;
|
||||
for (const website of websites) {
|
||||
const websiteDomain = new url.URL(getUrlFromString(website._id)).hostname;
|
||||
|
||||
if (websiteDomain === 'localhost') continue;
|
||||
if (websiteDomain === '127.0.0.1') continue;
|
||||
if (websiteDomain === '0.0.0.0') continue;
|
||||
|
||||
if (!websiteDomain.includes(rootDomain)) { detectedWebsites.push(website._id); }
|
||||
}
|
||||
}
|
||||
|
||||
const visitAnomalies = movingAverageAnomaly(visitsTimelineData, WINDOW_SIZE, THRESHOLD);
|
||||
const eventAnomalies = movingAverageAnomaly(eventsTimelineData, WINDOW_SIZE, THRESHOLD);
|
||||
|
||||
const report: AnomalyReport = {
|
||||
visits: [],
|
||||
events: [],
|
||||
dns: [],
|
||||
pid: project_id
|
||||
}
|
||||
|
||||
for (const visit of visitAnomalies) {
|
||||
const anomalyAlreadyExist = await AnomalyVisitModel.findOne({ visitDate: visit._id }, { _id: 1 });
|
||||
if (anomalyAlreadyExist) continue;
|
||||
await AnomalyVisitModel.create({ project_id: pid, visitDate: visit._id, created_at: Date.now() });
|
||||
report.visits.push(visit);
|
||||
}
|
||||
|
||||
for (const event of eventAnomalies) {
|
||||
const anomalyAlreadyExist = await AnomalyEventsModel.findOne({ eventDate: event._id }, { _id: 1 });
|
||||
if (anomalyAlreadyExist) continue;
|
||||
await AnomalyEventsModel.create({ project_id: pid, eventDate: event._id, created_at: Date.now() });
|
||||
report.events.push(event);
|
||||
}
|
||||
|
||||
for (const website of detectedWebsites) {
|
||||
const anomalyAlreadyExist = await AnomalyDomainModel.findOne({ domain: website }, { _id: 1 });
|
||||
if (anomalyAlreadyExist) continue;
|
||||
await AnomalyDomainModel.create({ project_id: pid, domain: website, created_at: Date.now() });
|
||||
report.dns.push(website);
|
||||
}
|
||||
|
||||
// const project = await ProjectModel.findById(pid);
|
||||
// if (!project) return { ok: false, error: 'Cannot find project with id ' + pid.toString() }
|
||||
// const user = await UserModel.findById(project.owner);
|
||||
// if (!user) return { ok: false, error: 'Cannot find user with id ' + project.owner.toString() }
|
||||
|
||||
// if (shouldSendMail.visitsEvents === true) {
|
||||
// await EmailService.sendAnomalyVisitsEventsEmail(user.email, project.name);
|
||||
// }
|
||||
// if (shouldSendMail.domains === true) {
|
||||
// await EmailService.sendAnomalyDomainEmail(user.email, project.name);
|
||||
// }
|
||||
|
||||
|
||||
callback(report);
|
||||
return report;
|
||||
|
||||
|
||||
}
|
||||
18
security/src/index.ts
Normal file
18
security/src/index.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { anomalyCheckAll, AnomalyReport } from "./AnomalyService";
|
||||
import { connectDatabase } from '@services/DatabaseService'
|
||||
import { requireEnv } from '@utils/requireEnv'
|
||||
|
||||
connectDatabase(requireEnv('MONGO_CONNECTION_STRING'));
|
||||
|
||||
|
||||
import fs from 'fs';
|
||||
|
||||
const reports: AnomalyReport[] = [];
|
||||
|
||||
anomalyCheckAll(report => {
|
||||
if (report.visits.length > 0 || report.events.length > 0 || report.dns.length > 0) {
|
||||
reports.push(report);
|
||||
}
|
||||
}).then(e => {
|
||||
fs.writeFileSync('security-report.json', JSON.stringify(reports));
|
||||
});
|
||||
Reference in New Issue
Block a user