update dependencies; add a script to build a RAG index of a GitHub repo, an API endpoint to query it, and a chatbot page to interact with it
This commit is contained in:
@@ -1,5 +1,7 @@
|
||||
import { getModel } from '$lib/utils/search';
|
||||
import { building } from '$app/environment';
|
||||
import type { Handle } from '@sveltejs/kit';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
if (!building) {
|
||||
getModel().catch((error) => {
|
||||
@@ -8,3 +10,18 @@ if (!building) {
|
||||
|
||||
console.log('Model loaded successfully!');
|
||||
}
|
||||
|
||||
export const handle: Handle = async ({ event, resolve }) => {
|
||||
// Check for existing session
|
||||
let sessionId = event.cookies.get('sessionId');
|
||||
|
||||
// If no session exists, create a new one
|
||||
if (!sessionId) {
|
||||
sessionId = uuidv4();
|
||||
event.cookies.set('sessionId', sessionId, { path: '/', httpOnly: true, sameSite: 'strict', maxAge: 60 * 60 * 24 * 7 }); // 1 week
|
||||
}
|
||||
// Add sessionId to locals for easy access in routes
|
||||
event.locals = { ...event.locals, sessionId };
|
||||
|
||||
return resolve(event);
|
||||
};
|
||||
|
9
src/routes/ai/+layout.svelte
Normal file
9
src/routes/ai/+layout.svelte
Normal file
@@ -0,0 +1,9 @@
|
||||
<script lang="ts">
|
||||
import SearchResults from '$lib/components/SearchResults.svelte';
|
||||
import AppContainer from '$lib/components/scenes/app/AppContainer.svelte';
|
||||
</script>
|
||||
|
||||
<AppContainer>
|
||||
<slot />
|
||||
<SearchResults />
|
||||
</AppContainer>
|
2
src/routes/ai/+layout.ts
Normal file
2
src/routes/ai/+layout.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export const prerender = false;
|
||||
export const ssr = false;
|
141
src/routes/ai/+page.svelte
Normal file
141
src/routes/ai/+page.svelte
Normal file
@@ -0,0 +1,141 @@
|
||||
<script lang="ts">
|
||||
import '../../app.css';
|
||||
import type { SearchResult } from '$lib/utils/search';
|
||||
import { searchResults } from '$lib/store';
|
||||
import { onMount } from 'svelte';
|
||||
import { marked } from 'marked';
|
||||
import { HumanMessage, AIMessage } from '@langchain/core/messages';
|
||||
import type { ChatHistory } from '../api/ai/+server';
|
||||
import { PUBLIC_LOAD_DUMMY_HISTORY } from '$env/static/public';
|
||||
|
||||
let searchResultsValue: SearchResult[] = [];
|
||||
let query = '';
|
||||
let loading = false;
|
||||
function generateDummyHistory() {
|
||||
return [
|
||||
JSON.parse(JSON.stringify(new HumanMessage({ content: 'Hello, AI!' }))),
|
||||
JSON.parse(JSON.stringify(new AIMessage({ content: 'Hello! How can I assist you today?' }))),
|
||||
JSON.parse(JSON.stringify(new HumanMessage({ content: "What's the weather like?" }))),
|
||||
JSON.parse(
|
||||
JSON.stringify(
|
||||
new AIMessage({
|
||||
content:
|
||||
"I'm sorry, but I don't have access to real-time weather information. You might want to check a weather app or website for the most up-to-date forecast."
|
||||
})
|
||||
)
|
||||
)
|
||||
];
|
||||
}
|
||||
|
||||
let chatHistory: ChatHistory = [];
|
||||
|
||||
onMount(async () => {
|
||||
try {
|
||||
if (PUBLIC_LOAD_DUMMY_HISTORY === 'true') {
|
||||
chatHistory = generateDummyHistory();
|
||||
return;
|
||||
}
|
||||
const response = await fetch('/api/ai');
|
||||
const data = await response.json();
|
||||
chatHistory = data.chatHistory || [];
|
||||
} catch (error) {
|
||||
console.error('Error fetching chat history:', error);
|
||||
}
|
||||
});
|
||||
|
||||
searchResults.subscribe((value: SearchResult[]) => {
|
||||
searchResultsValue = value ? value : [];
|
||||
});
|
||||
|
||||
function getRoleAndContent(message: any): { role: string; content: string } {
|
||||
if (message.type === 'constructor') {
|
||||
const messageType = message.id[2];
|
||||
return {
|
||||
role: messageType.replace('Message', '').toLowerCase(),
|
||||
content: message.kwargs.content
|
||||
};
|
||||
}
|
||||
return {
|
||||
role: 'unknown',
|
||||
content: JSON.stringify(message)
|
||||
};
|
||||
}
|
||||
|
||||
// safely render markdown
|
||||
function renderMarkdown(content: string) {
|
||||
return marked(content);
|
||||
}
|
||||
|
||||
async function handleSubmit() {
|
||||
loading = true;
|
||||
try {
|
||||
const response = await fetch('/api/ai', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({ query })
|
||||
});
|
||||
const data = await response.json();
|
||||
chatHistory = data.chatHistory || [];
|
||||
|
||||
query = '';
|
||||
} catch (error) {
|
||||
console.error('Error fetching AI response:', error);
|
||||
} finally {
|
||||
loading = false;
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<svelte:head>
|
||||
<title>silentsilas - AI</title>
|
||||
</svelte:head>
|
||||
|
||||
{#if searchResultsValue.length === 0}
|
||||
<div class="flex-grow flex-col overflow-auto p-4">
|
||||
<div class="space-y-4">
|
||||
{#each chatHistory as message}
|
||||
{@const { role, content } = getRoleAndContent(message)}
|
||||
{#if role === 'human'}
|
||||
<div class="chat chat-end">
|
||||
<div class="chat-bubble chat-bubble-primary">
|
||||
{@html renderMarkdown(content)}
|
||||
</div>
|
||||
</div>
|
||||
{:else}
|
||||
<div class="p-4 rounded-lg bg-base-300 prose md:container">
|
||||
{@html renderMarkdown(content)}
|
||||
</div>
|
||||
{/if}
|
||||
{/each}
|
||||
</div>
|
||||
|
||||
{#if loading}
|
||||
<div class="mt-4">
|
||||
<span class="loading loading-dots loading-lg"></span>
|
||||
</div>
|
||||
{/if}
|
||||
<form on:submit|preventDefault={handleSubmit} class="mt-4 flex-col">
|
||||
<label class="form-control">
|
||||
<div class="label">
|
||||
<span class="label-text">
|
||||
Querying the Authenticator <a
|
||||
href="https://git.silentsilas.com/silentsilas/Authenticator"
|
||||
target="_blank"
|
||||
class="link-primary">repository</a
|
||||
>
|
||||
</span>
|
||||
</div>
|
||||
<textarea
|
||||
bind:value={query}
|
||||
class="textarea textarea-bordered h-24"
|
||||
placeholder="Type your message here..."
|
||||
></textarea>
|
||||
</label>
|
||||
<button type="submit" class="btn btn-block btn-primary mt-2" disabled={loading}>
|
||||
Send
|
||||
</button>
|
||||
</form>
|
||||
</div>
|
||||
{/if}
|
82
src/routes/api/ai/+server.ts
Normal file
82
src/routes/api/ai/+server.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { json } from '@sveltejs/kit';
|
||||
import { FaissStore } from '@langchain/community/vectorstores/faiss';
|
||||
import { OpenAIEmbeddings } from '@langchain/openai';
|
||||
import { ChatAnthropic } from '@langchain/anthropic';
|
||||
import { RunnableSequence, RunnablePassthrough } from '@langchain/core/runnables';
|
||||
import { StringOutputParser } from '@langchain/core/output_parsers';
|
||||
import { ChatPromptTemplate } from '@langchain/core/prompts';
|
||||
import { join } from 'path';
|
||||
import { HumanMessage, AIMessage, SystemMessage } from '@langchain/core/messages';
|
||||
import type { RequestEvent } from '@sveltejs/kit';
|
||||
|
||||
const chatHistories: Record<string, ChatHistory> = {};
|
||||
|
||||
type VectorDocument = {
|
||||
pageContent: string;
|
||||
};
|
||||
|
||||
const formatDocumentsAsString = (documents: VectorDocument[]) => {
|
||||
return documents.map((doc) => doc.pageContent).join('\n\n');
|
||||
};
|
||||
|
||||
export type ChatHistory = HumanMessage[] | AIMessage[] | SystemMessage[];
|
||||
|
||||
export async function POST({ request, locals }: RequestEvent): Promise<Response> {
|
||||
const { query } = await request.json();
|
||||
const sessionId = locals.sessionId;
|
||||
|
||||
if (!chatHistories[sessionId]) {
|
||||
chatHistories[sessionId] = [];
|
||||
}
|
||||
const chatHistory = chatHistories[sessionId];
|
||||
|
||||
const directory = join(process.cwd(), 'vectorstore');
|
||||
const embeddings = new OpenAIEmbeddings();
|
||||
const vectorStore = await FaissStore.load(directory, embeddings);
|
||||
const vectorStoreRetriever = vectorStore.asRetriever();
|
||||
|
||||
const model = new ChatAnthropic({
|
||||
modelName: 'claude-3-5-sonnet-20240620',
|
||||
anthropicApiKey: process.env.ANTHROPIC_API_KEY
|
||||
});
|
||||
|
||||
const SYSTEM_TEMPLATE = `Use the following pieces of context to answer the question at the end.
|
||||
If you don't know the answer, just say that you don't know, don't try to make up an answer.
|
||||
|
||||
----------------
|
||||
|
||||
Context:
|
||||
{context}
|
||||
|
||||
Question:
|
||||
{question}`;
|
||||
|
||||
const prompt = ChatPromptTemplate.fromMessages([
|
||||
['system', SYSTEM_TEMPLATE],
|
||||
...chatHistory,
|
||||
['user', '{question}']
|
||||
]);
|
||||
|
||||
const chain = RunnableSequence.from([
|
||||
{
|
||||
context: vectorStoreRetriever.pipe(formatDocumentsAsString),
|
||||
question: new RunnablePassthrough(),
|
||||
},
|
||||
prompt,
|
||||
model,
|
||||
new StringOutputParser()
|
||||
]);
|
||||
|
||||
const answer = await chain.invoke(query);
|
||||
|
||||
chatHistory.push(new HumanMessage({ content: query }));
|
||||
chatHistory.push(new AIMessage({ content: answer }));
|
||||
|
||||
return json({ response: answer, chatHistory });
|
||||
}
|
||||
|
||||
export async function GET({ locals }): Promise<Response> {
|
||||
const sessionId = locals.sessionId;
|
||||
const chatHistory = chatHistories[sessionId] || [];
|
||||
return json({ chatHistory });
|
||||
}
|
@@ -1,6 +1,4 @@
|
||||
<script lang="ts">
|
||||
import Footer from '$lib/components/Footer.svelte';
|
||||
import NavBar from '$lib/components/NavBar.svelte';
|
||||
import SearchResults from '$lib/components/SearchResults.svelte';
|
||||
import AppContainer from '$lib/components/scenes/app/AppContainer.svelte';
|
||||
</script>
|
||||
|
Reference in New Issue
Block a user