Mirror of https://github.com/Litlyx/litlyx (synced 2025-12-09 23:48:36 +01:00)
update dockerfile
@@ -1,38 +1,28 @@
-# Start with a minimal Node.js base image
 FROM node:21-alpine as base

-# Install pnpm globally with caching to avoid reinstalling if nothing has changed
 RUN npm i -g pnpm

-# Set the working directory
 WORKDIR /home/app

-# Copy only package-related files to leverage caching
+COPY --link ./package.json ./tsconfig.json ./pnpm-lock.yaml ./
 COPY --link ./scripts/package.json ./scripts/pnpm-lock.yaml ./scripts/
-COPY --link ./shared/package.json ./shared/pnpm-lock.yaml ./shared/
 COPY --link ./consumer/package.json ./consumer/pnpm-lock.yaml ./consumer/

-# Install dependencies for each package
+RUN pnpm install
+RUN pnpm install --filter consumer

 WORKDIR /home/app/scripts
-RUN pnpm install --frozen-lockfile
+RUN pnpm install

-WORKDIR /home/app/shared
-RUN pnpm install --frozen-lockfile

-WORKDIR /home/app/consumer
-RUN pnpm install --frozen-lockfile

-# Now copy the rest of the source files
 WORKDIR /home/app

 COPY --link ../scripts ./scripts
 COPY --link ../shared ./shared
 COPY --link ../consumer ./consumer

-# Build the consumer
 WORKDIR /home/app/consumer

-RUN pnpm run build_all
+RUN pnpm run build

-# Start the application
 CMD ["node", "/home/app/consumer/dist/consumer/src/index.js"]
@@ -2,6 +2,7 @@ module.exports = {
     apps: [
         {
             name: 'consumer',
+            port: '3031',
             exec_mode: 'cluster',
             instances: '2',
             script: './dist/consumer/src/index.js',
@@ -1,5 +1,6 @@
 {
     "dependencies": {
+        "express": "^4.19.2",
         "ua-parser-js": "^1.0.37"
     },
     "devDependencies": {
@@ -9,14 +9,32 @@ import { EventModel } from "@schema/metrics/EventSchema";
 import { lookup } from './lookup';
 import { UAParser } from 'ua-parser-js';
 import { checkLimits } from './LimitChecker';
+import express from 'express';

 import { ProjectLimitModel } from '@schema/project/ProjectsLimits';
 import { ProjectCountModel } from '@schema/project/ProjectsCounts';

+
+const app = express();
+
+let durations: number[] = [];
+
+app.get('/status', async (req, res) => {
+    try {
+        return res.json({ status: 'ALIVE', durations })
+    } catch (ex) {
+        console.error(ex);
+        return res.setStatus(500).json({ error: ex.message });
+    }
+})
+
+app.listen(process.env.PORT);
+
 connectDatabase(requireEnv('MONGO_CONNECTION_STRING'));
 main();


 async function main() {

     await RedisStreamService.connect();
@@ -31,9 +49,10 @@ async function main() {
 }

 async function processStreamEntry(data: Record<string, string>) {
-    try {

     const start = Date.now();

+    try {
+
         const eventType = data._type;
         if (!eventType) return;
@@ -54,13 +73,19 @@ async function processStreamEntry(data: Record<string, string>) {
             await process_visit(data, sessionHash);
         }

-        const duration = Date.now() - start;

         // console.log('Entry processed in', duration, 'ms');

     } catch (ex: any) {
         console.error('ERROR PROCESSING STREAM EVENT', ex.message);
     }

+    const duration = Date.now() - start;
+
+    durations.push(duration);
+    if (durations.length > 1000) {
+        durations = durations.splice(500);
+    }
+
 }

 async function process_visit(data: Record<string, string>, sessionHash: string) {
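The new /status route simply returns { status: 'ALIVE', durations }, so the consumer can be probed from outside once its port is published (the docker-compose change later in this commit maps host port 3011 to container port 3031). Below is a minimal probe sketch, not part of the commit; the URL and the averaging step are illustrative assumptions, and it requires Node 18+ for the global fetch.

// Sketch of an external health probe for the consumer's /status endpoint.
// Assumes the "3011:3031" docker-compose port mapping added in this commit.
async function probeConsumer(url = 'http://localhost:3011/status') {
    const res = await fetch(url);
    if (!res.ok) throw new Error(`status endpoint returned ${res.status}`);
    const body = await res.json() as { status: string; durations: number[] };
    // durations holds the most recent per-entry processing times in milliseconds.
    const avg = body.durations.length
        ? body.durations.reduce((a, b) => a + b, 0) / body.durations.length
        : 0;
    console.log(body.status, '- avg entry processing time:', avg.toFixed(1), 'ms');
}

probeConsumer().catch(console.error);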
@@ -1,50 +1,31 @@
-# Start with a minimal Node.js base image
 FROM node:21-alpine AS base

-# Create a distinct build environment
 FROM base AS build

-# Install pnpm globally with caching to avoid reinstalling if nothing has changed
 RUN npm i -g pnpm

-# Set the working directory
 WORKDIR /home/app

-# Copy only package-related files to leverage caching
+COPY --link ./package.json ./tsconfig.json ./pnpm-lock.yaml ./
 COPY --link ./dashboard/package.json ./dashboard/pnpm-lock.yaml ./dashboard/
-COPY --link ./lyx-ui/package.json ./lyx-ui/pnpm-lock.yaml ./lyx-ui/
-COPY --link ./shared/package.json ./shared/pnpm-lock.yaml ./shared/

-# Install dependencies for each package
-WORKDIR /home/app/lyx-ui
-RUN pnpm install --frozen-lockfile
+RUN pnpm install
+RUN pnpm install --filter dashboard

-# WORKDIR /home/app/shared
-# RUN pnpm install --frozen-lockfile

-WORKDIR /home/app/dashboard
-RUN pnpm install --frozen-lockfile

-# Now copy the rest of the source files
 WORKDIR /home/app

 COPY --link ./dashboard ./dashboard
-COPY --link ./lyx-ui ./lyx-ui
 COPY --link ./shared ./shared

-# Build the dashboard
 WORKDIR /home/app/dashboard

 RUN pnpm run build

-# Use a smaller base image for the final production build
 FROM node:21-alpine AS production

-# Set the working directory for the production container
 WORKDIR /home/app

-# Copy the built application from the build stage
 COPY --from=build /home/app/dashboard/.output /home/app/.output

-# Start the application
 CMD ["node", "/home/app/.output/server/index.mjs"]
@@ -22,6 +22,7 @@
     "googleapis": "^144.0.0",
     "highlight.js": "^11.10.0",
     "jsonwebtoken": "^9.0.2",
+    "litlyx-js": "^1.0.3",
     "nuxt": "^3.11.2",
     "nuxt-vue3-google-signin": "^0.0.11",
     "openai": "^4.61.0",
@@ -1,105 +0,0 @@
-<script setup lang="ts">
-
-import SupabaseChartDialog from '~/components/integrations/SupabaseChartDialog.vue';
-
-definePageMeta({ layout: 'dashboard' });
-const activeProjectId = useActiveProjectId();
-
-
-const { createAlert } = useAlert();
-
-const {
-    supabaseUrl, supabaseAnonKey, supabaseServiceRoleKey, integrationsCredentials,
-    supabaseIntegrations, updateIntegrationsCredentails
-} = useSupabase()
-
-async function updateCredentials() {
-
-    const res = await updateIntegrationsCredentails({
-        supabase_url: supabaseUrl.value,
-        supabase_anon_key: supabaseAnonKey.value,
-        supabase_service_role_key: supabaseServiceRoleKey.value
-    });
-
-    if (res.ok === true) {
-        integrationsCredentials.refresh();
-        createAlert('Credentials updated', 'Credentials updated successfully', 'far fa-error', 4000);
-    } else {
-        createAlert('Error updating credentials', res.error, 'far fa-error', 4000);
-    }
-
-}
-
-const { openDialogEx } = useCustomDialog()
-
-function showChartDialog() {
-    openDialogEx(SupabaseChartDialog, {
-        closable: true,
-        width: '55vw',
-        height: '65vh'
-    })
-}
-
-</script>
-
-
-<template>
-
-    <div class="home w-full h-full px-10 pt-6 overflow-y-auto">
-
-        <CardTitled title="Supabase integration" class="w-full">
-            <template #header>
-                <img class="h-10 w-10" :src="'supabase.svg'" alt="Supabase logo">
-            </template>
-            <div class="flex gap-6 flex-col w-full">
-                <div class="flex flex-col">
-                    <div class="text-lyx-text"> Supabase url </div>
-                    <div class="text-lyx-text-dark"> Required to fetch data from supabase </div>
-                    <LyxUiInput v-if="!integrationsCredentials.pending.value" class="w-full mt-2 px-4 py-1"
-                        v-model="supabaseUrl" type="text"></LyxUiInput>
-                    <div v-if="integrationsCredentials.pending.value"> Loading... </div>
-                </div>
-                <div class="flex flex-col">
-                    <div class="text-lyx-text"> Supabase anon key </div>
-                    <div class="text-lyx-text-dark"> Required to fetch data from supabase </div>
-                    <LyxUiInput v-if="!integrationsCredentials.pending.value" class="w-full mt-2 px-4 py-1"
-                        v-model="supabaseAnonKey" type="password"></LyxUiInput>
-                    <div v-if="integrationsCredentials.pending.value"> Loading... </div>
-                </div>
-                <div class="flex flex-col">
-                    <div class="text-lyx-text"> Supabase service role key </div>
-                    <div class="text-lyx-text-dark"> Only used if you need to bypass RLS </div>
-                    <LyxUiInput v-if="!integrationsCredentials.pending.value" class="w-full mt-2 px-4 py-1"
-                        v-model="supabaseServiceRoleKey" type="password"></LyxUiInput>
-                    <div v-if="integrationsCredentials.pending.value"> Loading... </div>
-                </div>
-                <div class="flex gap-3">
-                    <LyxUiButton v-if="!integrationsCredentials.pending.value" @click="updateCredentials()"
-                        type="primary"> Save
-                    </LyxUiButton>
-                </div>
-            </div>
-        </CardTitled>
-
-
-        <LyxUiCard class="mt-6 w-full">
-            <div class="flex flex-col gap-8">
-                <div class="flex gap-2 items-center" v-for="supabaseIntegration of supabaseIntegrations.data.value">
-                    <div> {{ supabaseIntegration.name }} </div>
-                    <div> <i class="far fa-edit"></i> </div>
-                    <div> <i class="far fa-trash"></i> </div>
-                </div>
-                <div>
-                    <LyxUiButton type="primary" @click="showChartDialog()"> Add supabase chart </LyxUiButton>
-                </div>
-            </div>
-        </LyxUiCard>
-
-
-        <div class="mt-10">
-            <IntegrationsSupabaseLineChart integration_id="66f6c558d97e4abd408feee0"></IntegrationsSupabaseLineChart>
-        </div>
-
-    </div>
-
-</template>
@@ -39,7 +39,10 @@ services:
   consumer:
     image: litlyx-consumer
     restart: always
+    ports:
+      - "3011:3031"
     environment:
+      PORT: "3031"
       # Optional - Used to send welcome and quota emails
       # EMAIL_SERVICE: "Brevo"
       # BREVO_API_KEY: ""
pnpm-lock.yaml (generated): 1395 lines changed. File diff suppressed because it is too large.
@@ -1,38 +1,28 @@
-# Start with a minimal Node.js base image
 FROM node:21-alpine as base

-# Install pnpm globally with caching to avoid reinstalling if nothing has changed
 RUN npm i -g pnpm

-# Set the working directory
 WORKDIR /home/app

-# Copy only package-related files to leverage caching
+COPY --link ./package.json ./tsconfig.json ./pnpm-lock.yaml ./
 COPY --link ./scripts/package.json ./scripts/pnpm-lock.yaml ./scripts/
-COPY --link ./shared/package.json ./shared/pnpm-lock.yaml ./shared/
 COPY --link ./producer/package.json ./producer/pnpm-lock.yaml ./producer/

-# Install dependencies for each package
+RUN pnpm install
+RUN pnpm install --filter producer

 WORKDIR /home/app/scripts
-RUN pnpm install --frozen-lockfile
+RUN pnpm install

-WORKDIR /home/app/shared
-RUN pnpm install --frozen-lockfile

-WORKDIR /home/app/producer
-RUN pnpm install --frozen-lockfile

-# Now copy the rest of the source files
 WORKDIR /home/app

 COPY --link ../scripts ./scripts
 COPY --link ../shared ./shared
 COPY --link ../producer ./producer

-# Build the producer
 WORKDIR /home/app/producer

-RUN pnpm run build_all
+RUN pnpm run build

-# Start the application
 CMD ["node", "/home/app/producer/dist/producer/src/index.js"]
@@ -18,8 +18,6 @@ type ConsumerGroup = typeof consumerGroups[number];

 export class RedisStreamService {

-    private static processed = 0;
-
     private static client = createClient({
         url: requireEnv("REDIS_URL"),
         username: requireEnv("REDIS_USERNAME"),
@@ -45,7 +43,6 @@ export class RedisStreamService {
                 await process_function(messageData.message);
                 await this.client.xAck(stream_name, group_name, messageData.id);
                 await this.client.set(`ACK:${group_name}`, messageData.id);
-                RedisStreamService.processed++;
             }
         }

@@ -74,13 +71,6 @@ export class RedisStreamService {

         if (!consumerGroups.includes(options.group_name)) return console.error('GROUP NAME NOT ALLOWED');

-        setInterval(() => {
-            if (RedisStreamService.processed > 0) {
-                console.log('Processed:', (RedisStreamService.processed / 10).toFixed(2), '/s');
-                RedisStreamService.processed = 0;
-            }
-        }, 10_000);
-
         console.log('Start reading loop')

         try {
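With the processed counter and its 10-second throughput log removed, consumer progress can still be observed from outside through the ACK:<group> keys the service writes on every acknowledgement. The sketch below is not part of the commit; the group name is a placeholder and the environment variable names only mirror the requireEnv calls visible in the diff.

import { createClient } from 'redis';

// Sketch only: read the id of the last stream entry a consumer group acknowledged.
// RedisStreamService stores it via this.client.set(`ACK:${group_name}`, messageData.id).
async function lastAckedId(groupName: string): Promise<string | null> {
    const client = createClient({
        url: process.env.REDIS_URL,
        username: process.env.REDIS_USERNAME
    });
    await client.connect();
    const id = await client.get(`ACK:${groupName}`);
    await client.disconnect();
    // Stream ids look like "<ms-timestamp>-<seq>", so the value also indicates recency.
    return id;
}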