Commit 507dde16 authored by echicken

Response formatting

parent b0c16f17
-import express, { Express, Request, Response } from 'express';
+import type { format } from './lib/ai.js';
+import express, { Express, NextFunction, Request, Response } from 'express';
 import bodyParser from 'body-parser';
 import { init, queryAgent } from './lib/ai.js';
 
-const app: Express = express();
-
-app.use(bodyParser.urlencoded({ extended: false }));
-app.use(bodyParser.json());
-
-app.get('/query/:query', async (req: Request, res: Response) => {
+async function query(req: Request, res: Response, next: NextFunction) {
+    if (req.params.query === undefined) return next();
+    if (req.params.format !== undefined && req.params.format !== 'plain' && req.params.format !== 'markdown') {
+        res.sendStatus(400);
+        return;
+    }
+    const format = req.params.format as format;
     try {
-        const answer = await queryAgent({ query: req.params.query });
+        const answer = await queryAgent({
+            query: req.params.query,
+            format,
+        });
         if (answer === undefined) {
             res.sendStatus(404);
         } else {
@@ -18,7 +23,15 @@ app.get('/query/:query', async (req: Request, res: Response) => {
     } catch (err) {
         console.error('Error on query', req.params.query, err);
     }
-});
+}
+
+const app: Express = express();
+
+app.use(bodyParser.urlencoded({ extended: false }));
+app.use(bodyParser.json());
+
+app.get('/query/:query', query);
+app.get('/query/:query/:format', query);
 
 app.get('/', (req: Request, res: Response) => {
     res.sendStatus(404);
...
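With this change, clients can request a response format as an optional trailing path segment, and anything other than plain or markdown is rejected with a 400 before the agent is ever queried. A minimal sketch of how a client might call the two routes (the base URL and the sample query are assumptions, and parsing the body as JSON assumes the elided success branch sends the Answer object as JSON):

// Hypothetical client for the routes registered above.
// BASE_URL and the sample query are assumptions, not part of this commit.
const BASE_URL = 'http://localhost:3000';

async function ask(query: string, format?: 'plain' | 'markdown'): Promise<unknown> {
    const path = format === undefined
        ? `/query/${encodeURIComponent(query)}`
        : `/query/${encodeURIComponent(query)}/${format}`;
    const res = await fetch(BASE_URL + path);
    if (res.status === 400) throw new Error('unsupported format'); // e.g. /query/.../html
    if (res.status === 404) throw new Error('no answer available');
    return res.json(); // assumes the elided success branch responds with JSON
}

// Omitting the segment preserves the pre-commit behaviour:
const answer = await ask('How do I add a door game?');
// The new optional :format segment requests markdown output:
const formatted = await ask('How do I add a door game?', 'markdown');

Routing both paths to the same named handler, with next() falling through when :query is missing, avoids duplicating the try/catch across two anonymous callbacks.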
@@ -3,19 +3,7 @@ import type { BaseToolWithCall, EngineResponse, Metadata, NodeWithScore, Related
 import { CallbackManager, CompactAndRefine, Document, IngestionPipeline, MetadataMode, OpenAI, OpenAIAgent, OpenAIEmbedding, QdrantVectorStore, QueryEngine, QueryEngineTool, RelevancyEvaluator, ResponseSynthesizer, RetrieverQueryEngine, Settings, SimpleNodeParser, TextQaPrompt, VectorStoreIndex } from 'llamaindex';
 import config from './config.js';
 
-// configure LLM
-Settings.llm = new OpenAI({ model: config.llm }) as any;
-
-// configure embedding model
-Settings.embedModel = new OpenAIEmbedding({
-    model: config.embeddingModel,
-    dimensions: config.embeddingDimension,
-});
-
-const vectorStore = new QdrantVectorStore({
-    collectionName: config.qdrantCollection,
-    url: config.qdrantUrl,
-    embedModel: Settings.embedModel,
-});
+export type format = 'plain' | 'markdown' | undefined;
 
 interface Source {
     title: string,
@@ -31,6 +19,20 @@ interface Answer {
     },
 };
 
+// configure LLM
+Settings.llm = new OpenAI({ model: config.llm }) as any;
+
+// configure embedding model
+Settings.embedModel = new OpenAIEmbedding({
+    model: config.embeddingModel,
+    dimensions: config.embeddingDimension,
+});
+
+const vectorStore = new QdrantVectorStore({
+    collectionName: config.qdrantCollection,
+    url: config.qdrantUrl,
+    embedModel: Settings.embedModel,
+});
+
 const newTextQaPrompt: TextQaPrompt = ({ context, query }): string => {
     return `Context:\r\n${context}\r\n\r\n---\r\n\r\nQuestion:\r\n${query}\r\n\r\n---\r\n\r\nResponse:\r\n`;
 }
@@ -59,13 +61,8 @@ async function getQueryEngineTools(): Promise<BaseToolWithCall[]> {
     return [queryEngineTool];
 }
 
-async function getAgent(): Promise<OpenAIAgent> {
-    const tools = await getQueryEngineTools();
-    const agent = new OpenAIAgent({
-        llm: Settings.llm,
-        tools,
-        verbose: true,
-        systemPrompt: (
+async function getAgent(format?: format): Promise<OpenAIAgent> {
+    let systemPrompt = (
         'You are a polite, friendly, helpful technical support assistant for system operators of Synchronet BBS.\r\n'
         + 'Your job is to answer questions about installing, configuring, customizing, operating, troubleshooting, and using Synchronet BBS.\r\n'
         + 'You should provide step-by-step instructions when possible and be detailed in your responses.\r\n'
@@ -74,7 +71,19 @@ async function getAgent(): Promise<OpenAIAgent> {
         + 'DO NOT repeat the question in your response.\r\n'
         + 'You MUST answer the question using only the provided context information and NOT any prior knowledge.\r\n'
        + 'If no answer can be found in the context information, you MUST respond with the phrase "Answer unavailable" and nothing else.\r\n'
-    ),
+    );
+    if (format === 'plain') {
+        systemPrompt += 'Use plain text only in your response. DO NOT include any HTML in your response except when being used as a code example.';
+    } else if (format === 'markdown') {
+        systemPrompt += 'Format your response with markdown, using bold, italics, underline, code blocks with syntax highlighting, block quotes, ordered and unordered lists where applicable.';
+    }
+    const tools = await getQueryEngineTools();
+    const agent = new OpenAIAgent({
+        llm: Settings.llm,
+        tools,
+        verbose: true,
+        systemPrompt,
+    });
     return agent;
 }
@@ -136,9 +145,9 @@ export async function query({ query }: { query: string }): Promise<Answer | undefined> {
     return answer;
 }
 
-export async function queryAgent({ query }: { query: string }): Promise<Answer | undefined> {
+export async function queryAgent({ query, format }: { query: string, format?: format }): Promise<Answer | undefined> {
     console.debug(`Querying agent with: ${query}`);
-    const agent = await getAgent();
+    const agent = await getAgent(format);
     const callbackManager = new CallbackManager(); // https://github.com/run-llama/LlamaIndexTS/issues/1015
     const sourceNodes = new Promise<NodeWithScore<Metadata>[]>((res) => { // Let's just get out of callback-land as quickly as possible eh? 
         callbackManager.on('retrieve-end', (data) => {
...
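On the library side, the format argument only changes the system prompt handed to the agent; retrieval and synthesis are untouched, and since the exported type includes undefined, existing callers keep working unchanged. A short sketch of calling the exported function directly, with an invented sample query:

import { queryAgent } from './lib/ai.js';

// The sample query is hypothetical; format may be 'plain', 'markdown', or omitted.
// lib/ai.js also exports init(); whether it must be called first is not shown in this excerpt.
const answer = await queryAgent({
    query: 'How do I enable the web server?',
    format: 'markdown',
});

if (answer === undefined) {
    // The route handler above maps an undefined result to a 404.
    console.log('No answer found');
} else {
    console.log(answer);
}

Appending the format instruction to the prompt at agent-construction time, rather than per message, means each request builds an agent whose prompt already matches the requested output style.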