From b38363ddf5d310a2ead4b2355843e4a45625e47a Mon Sep 17 00:00:00 2001 From: Emily Date: Wed, 18 Dec 2024 16:49:18 +0100 Subject: [PATCH] fix ai message + add typer --- dashboard/composables/useTextType.ts | 36 ++++++++++ dashboard/pages/analyst.vue | 70 ++++++++++++++------ dashboard/server/ai/functions/AI_Sessions.ts | 4 +- dashboard/server/services/AiService.ts | 8 ++- 4 files changed, 91 insertions(+), 27 deletions(-) create mode 100644 dashboard/composables/useTextType.ts diff --git a/dashboard/composables/useTextType.ts b/dashboard/composables/useTextType.ts new file mode 100644 index 0000000..b6caf7e --- /dev/null +++ b/dashboard/composables/useTextType.ts @@ -0,0 +1,36 @@ + + + + +export function useTextType(options: { ms: number, increase: number }, onTickAction?: () => any) { + + let interval: any; + const index = ref(0); + + function onTick() { + index.value += options.increase; + onTickAction?.(); + } + + function pause() { + if (interval) clearInterval(interval); + } + + function resume() { + if (interval) clearInterval(interval); + interval = setInterval(() => onTick(), options.ms); + } + + function stop() { + if (interval) clearTimeout(interval); + } + + function start() { + index.value = 0; + if (interval) clearInterval(interval); + interval = setInterval(() => onTick(), options.ms); + } + + return { start, stop, resume, pause, index, interval } + +} \ No newline at end of file diff --git a/dashboard/pages/analyst.vue b/dashboard/pages/analyst.vue index 021468c..b55ae65 100644 --- a/dashboard/pages/analyst.vue +++ b/dashboard/pages/analyst.vue @@ -26,13 +26,28 @@ const loading = ref(false); const currentChatId = ref(""); const currentChatMessages = ref<{ role: string, content: string, charts?: any[], tool_calls?: any }[]>([]); -const currentChatMessageDelta = ref(''); +const currentChatMessageDelta = ref(""); -const currentChatMessageDeltaHtml = computed(() => { - const lastData = currentChatMessageDelta.value.match(/\[(data:(.*?))\]/g); + +const 
typer = useTextType({ ms: 10, increase: 2 }, () => { const cleanMessage = currentChatMessageDelta.value.replace(/\[(data:(.*?))\]/g, ''); - if (!lastData || lastData.length == 0) return cleanMessage; - return `
${lastData.at(-1)}
${cleanMessage}
`; + if (typer.index.value >= cleanMessage.length) typer.pause(); +}); + +onUnmounted(() => { + typer.stop(); +}) + +const currentChatMessageDeltaTextVisible = computed(() => { + const cleanMessage = currentChatMessageDelta.value.replace(/\[(data:(.*?))\]/g, ''); + const textVisible = cleanMessage.substring(0, typer.index.value); + setTimeout(() => scrollToBottom(), 1); + return textVisible; +}); + +const currentChatMessageDeltaShowLoader = computed(() => { + const lastData = currentChatMessageDelta.value.match(/\[(data:(.*?))\]$/); + return lastData != null; }); const scroller = ref(null); @@ -51,10 +66,16 @@ async function pollSendMessageStatus(chat_id: string, times: number, updateStatu updateStatus(res.status); + + typer.resume(); + + if (res.completed === false) { - setTimeout(() => pollSendMessageStatus(chat_id, times + 1, updateStatus), (times > 20 ? 1000 : 500)); + setTimeout(() => pollSendMessageStatus(chat_id, times + 1, updateStatus), (times > 10 ? 2000 : 1000)); } else { + typer.stop(); + const messages = await $fetch(`/api/ai/${chat_id}/get_messages`, { headers: useComputedHeaders({ useSnapshotDates: false }).value }); @@ -62,18 +83,13 @@ async function pollSendMessageStatus(chat_id: string, times: number, updateStatu currentChatMessages.value = messages.map(e => ({ ...e, charts: e.charts.map(k => JSON.parse(k)) })) as any; currentChatMessageDelta.value = ''; - - // currentChatMessages.value.push({ - // role: 'assistant', - // content: currentChatMessageDelta.value.replace(/\[data:.*?\]/g, ''), - // }); - } } async function sendMessage() { + if (loading.value) return; if (!project.value) return; @@ -100,14 +116,15 @@ async function sendMessage() { await new Promise(e => setTimeout(e, 200)); + + typer.start(); + await pollSendMessageStatus(res.chat_id, 0, status => { if (!status) return; if (status.length > 0) loading.value = false; currentChatMessageDelta.value = status; }); - - } catch (ex: any) { if (ex.message.includes('CHAT_LIMIT_REACHED')) { @@ 
-237,14 +254,14 @@ async function clearAllChats() {
-
+
{{ message.content }}
-
+
@@ -256,7 +273,6 @@ async function clearAllChats() { }" /> -
@@ -285,17 +301,26 @@ async function clearAllChats() {
+ +
+
-
- +
+ +
Loading
+
+
+
@@ -356,9 +381,10 @@ async function clearAllChats() {
History
- + Clear all - +
diff --git a/dashboard/server/ai/functions/AI_Sessions.ts b/dashboard/server/ai/functions/AI_Sessions.ts index 0f98906..beb01ed 100644 --- a/dashboard/server/ai/functions/AI_Sessions.ts +++ b/dashboard/server/ai/functions/AI_Sessions.ts @@ -8,7 +8,7 @@ const getSessionsCountsTool: AIPlugin_TTool<'getSessionsCount'> = { type: 'function', function: { name: 'getSessionsCount', - description: 'Gets the number of sessions received on a date range', + description: 'Gets the number of sessions (unique visitors) received on a date range', parameters: { type: 'object', properties: { @@ -83,4 +83,4 @@ export class AiSessions extends AIPlugin<['getSessionsCount', 'getSessionsTimeli } } -export const ASessionsInstance = new AiSessions(); +export const AiSessionsInstance = new AiSessions(); diff --git a/dashboard/server/services/AiService.ts b/dashboard/server/services/AiService.ts index 714db46..18b4b40 100644 --- a/dashboard/server/services/AiService.ts +++ b/dashboard/server/services/AiService.ts @@ -2,11 +2,11 @@ import OpenAI from "openai"; import { AiChatModel } from '@schema/ai/AiChatSchema'; -import { ProjectCountModel } from '@schema/project/ProjectsCounts'; import { ProjectLimitModel } from '@schema/project/ProjectsLimits'; import { AiEventsInstance } from '../ai/functions/AI_Events'; import { AiVisitsInstance } from '../ai/functions/AI_Visits'; +import { AiSessionsInstance } from '../ai/functions/AI_Sessions'; import { AiComposableChartInstance } from '../ai/functions/AI_ComposableChart'; const { AI_KEY, AI_ORG, AI_PROJECT } = useRuntimeConfig(); @@ -18,13 +18,15 @@ const openai = new OpenAI({ apiKey: AI_KEY, organization: AI_ORG, project: AI_PR const tools: OpenAI.Chat.Completions.ChatCompletionTool[] = [ ...AiVisitsInstance.getTools(), ...AiEventsInstance.getTools(), - ...AiComposableChartInstance.getTools() + ...AiSessionsInstance.getTools(), + ...AiComposableChartInstance.getTools(), ] const functions: any = { ...AiVisitsInstance.getHandlers(), 
...AiEventsInstance.getHandlers(), + ...AiSessionsInstance.getHandlers(), ...AiComposableChartInstance.getHandlers() } @@ -188,7 +190,7 @@ export async function sendMessageOnChat(text: string, pid: string, time_offset: } else { const roleMessage: OpenAI.Chat.Completions.ChatCompletionMessageParam = { role: 'system', - content: "Today ISO date: " + new Date().toISOString() + content: "You are an AI Data Analyst and Growth Hacker specialized in helping users analyze data collected within Litlyx and providing strategies to grow their website, app, or business. Your scope is strictly limited to data creation, visualization, and growth-related advice. If a user asks something outside this domain, politely inform them that you are not designed to answer such questions. Today's ISO date is " + new Date().toISOString() + ". Take this into account when the user asks about relative dates." } messages.push(roleMessage); await addMessageToChat(roleMessage, chat_id);