move ai to v0 routes
AyushAgrawal-A2 committed Jan 9, 2025
1 parent b664f27 commit 6ec90ff
Showing 40 changed files with 391 additions and 340 deletions.
2 changes: 2 additions & 0 deletions .vscode/settings.json
@@ -3,6 +3,7 @@
   "cSpell.words": [
     "actix",
     "autoclean",
+    "autoincrement",
     "awscli",
     "awscliv",
     "ayush",
@@ -25,6 +26,7 @@
     "dpkg",
     "elif",
     "endregion",
+    "expressjwt",
     "finitize",
     "Fuzzysort",
     "GETCELL",
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -1,17 +1,21 @@
 import Anthropic from '@anthropic-ai/sdk';
 import type { Response } from 'express';
+import {
+  getAnthropicApiArgs,
+  parseAnthropicResponse,
+  parseAnthropicStream,
+} from 'quadratic-api/src/ai/helpers/anthropic.helper';
+import { ANTHROPIC_API_KEY } from 'quadratic-api/src/env-vars';
 import { getModelOptions } from 'quadratic-shared/ai/helpers/model.helper';
-import type { AIAutoCompleteRequestBody, AIMessagePrompt, AnthropicModel } from 'quadratic-shared/typesAndSchemasAI';
-import { ANTHROPIC_API_KEY } from '../../env-vars';
-import { getAnthropicApiArgs, parseAnthropicResponse, parseAnthropicStream } from './helpers/anthropic.helper';
+import type { AIMessagePrompt, AIRequestBody, AnthropicModel } from 'quadratic-shared/typesAndSchemasAI';

 const anthropic = new Anthropic({
   apiKey: ANTHROPIC_API_KEY,
 });

 export const handleAnthropicRequest = async (
   model: AnthropicModel,
-  args: Omit<AIAutoCompleteRequestBody, 'model'>,
+  args: Omit<AIRequestBody, 'model'>,
   response: Response
 ): Promise<AIMessagePrompt | undefined> => {
   const { system, messages, tools, tool_choice } = getAnthropicApiArgs(args);
@@ -34,7 +38,7 @@ export const handleAnthropicRequest = async (
     response.setHeader('Cache-Control', 'no-cache');
     response.setHeader('Connection', 'keep-alive');

-    const responseMessage = await parseAnthropicStream(chunks, response);
+    const responseMessage = await parseAnthropicStream(chunks, response, model);
     return responseMessage;
   } catch (error: any) {
     if (!response.headersSent) {
@@ -61,7 +65,7 @@ export const handleAnthropicRequest = async (
       tools,
       tool_choice,
     });
-    const responseMessage = parseAnthropicResponse(result, response);
+    const responseMessage = parseAnthropicResponse(result, response, model);
     return responseMessage;
   } catch (error: any) {
     if (error instanceof Anthropic.APIError) {
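The same two changes repeat across all three provider handlers: the request type is renamed from AIAutoCompleteRequestBody to AIRequestBody, and the model is now threaded into every parse helper. A condensed sketch of the resulting shape, using simplified local stand-ins for the shared types (the real AIMessagePrompt and AnthropicModel definitions live in quadratic-shared/typesAndSchemasAI):

```ts
// Simplified stand-ins; the real types are richer unions in
// quadratic-shared/typesAndSchemasAI.
type AnthropicModel = string;

interface AIMessagePrompt {
  role: 'assistant';
  content: string;
  contextType: 'userPrompt';
  toolCalls: unknown[];
  model: AnthropicModel; // new in this commit: every parsed message records its model
}

// The parsers now take the model as a third argument and stamp it onto the
// message they build, so callers can tell which model produced a reply.
function buildResponseMessage(content: string, model: AnthropicModel): AIMessagePrompt {
  return { role: 'assistant', content, contextType: 'userPrompt', toolCalls: [], model };
}
```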
@@ -2,15 +2,19 @@ import { AnthropicBedrock } from '@anthropic-ai/bedrock-sdk';
 import Anthropic from '@anthropic-ai/sdk';
 import { BedrockRuntimeClient, ConverseCommand, ConverseStreamCommand } from '@aws-sdk/client-bedrock-runtime';
 import { type Response } from 'express';
-import { getModelOptions, isBedrockAnthropicModel } from 'quadratic-shared/ai/helpers/model.helper';
 import {
-  type AIAutoCompleteRequestBody,
-  type AIMessagePrompt,
-  type BedrockModel,
-} from 'quadratic-shared/typesAndSchemasAI';
-import { AWS_S3_ACCESS_KEY_ID, AWS_S3_REGION, AWS_S3_SECRET_ACCESS_KEY } from '../../env-vars';
-import { getAnthropicApiArgs, parseAnthropicResponse, parseAnthropicStream } from './helpers/anthropic.helper';
-import { getBedrockApiArgs, parseBedrockResponse, parseBedrockStream } from './helpers/bedrock.helper';
+  getAnthropicApiArgs,
+  parseAnthropicResponse,
+  parseAnthropicStream,
+} from 'quadratic-api/src/ai/helpers/anthropic.helper';
+import {
+  getBedrockApiArgs,
+  parseBedrockResponse,
+  parseBedrockStream,
+} from 'quadratic-api/src/ai/helpers/bedrock.helper';
+import { AWS_S3_ACCESS_KEY_ID, AWS_S3_REGION, AWS_S3_SECRET_ACCESS_KEY } from 'quadratic-api/src/env-vars';
+import { getModelOptions, isBedrockAnthropicModel } from 'quadratic-shared/ai/helpers/model.helper';
+import type { AIMessagePrompt, AIRequestBody, BedrockModel } from 'quadratic-shared/typesAndSchemasAI';

 // aws-sdk for bedrock, generic for all models
 const bedrock = new BedrockRuntimeClient({
@@ -27,7 +31,7 @@ const bedrock_anthropic = new AnthropicBedrock({

 export const handleBedrockRequest = async (
   model: BedrockModel,
-  args: Omit<AIAutoCompleteRequestBody, 'model'>,
+  args: Omit<AIRequestBody, 'model'>,
   response: Response
 ): Promise<AIMessagePrompt | undefined> => {
   const { stream, temperature, max_tokens } = getModelOptions(model, args);
@@ -51,7 +55,7 @@ export const handleBedrockRequest = async (
       response.setHeader('Cache-Control', 'no-cache');
       response.setHeader('Connection', 'keep-alive');

-      const responseMessage = await parseAnthropicStream(chunks, response);
+      const responseMessage = await parseAnthropicStream(chunks, response, model);
       return responseMessage;
     } catch (error: any) {
       if (!response.headersSent) {
@@ -77,7 +81,7 @@ export const handleBedrockRequest = async (
         tools,
         tool_choice,
       });
-      const responseMessage = parseAnthropicResponse(result, response);
+      const responseMessage = parseAnthropicResponse(result, response, model);
       return responseMessage;
     } catch (error: any) {
       if (error instanceof Anthropic.APIError) {
@@ -111,7 +115,7 @@ export const handleBedrockRequest = async (
       response.setHeader('Cache-Control', 'no-cache');
       response.setHeader('Connection', 'keep-alive');

-      const responseMessage = await parseBedrockStream(chunks, response);
+      const responseMessage = await parseBedrockStream(chunks, response, model);
       return responseMessage;
     } catch (error: any) {
       if (!response.headersSent) {
@@ -141,7 +145,7 @@ export const handleBedrockRequest = async (
      });

      const result = await bedrock.send(command);
-     const responseMessage = parseBedrockResponse(result.output, response);
+     const responseMessage = parseBedrockResponse(result.output, response, model);
      return responseMessage;
    } catch (error: any) {
      if (error.response) {
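The Bedrock handler keeps two clients: AnthropicBedrock for Anthropic models hosted on Bedrock, so the Anthropic parsers can be reused, and the generic BedrockRuntimeClient Converse API for everything else. A minimal sketch of that dispatch, assuming a simplified predicate (the real isBedrockAnthropicModel is imported from quadratic-shared/ai/helpers/model.helper):

```ts
// Sketch only: assumes Bedrock's Anthropic model ids carry an 'anthropic.'
// prefix; the real check lives in quadratic-shared/ai/helpers/model.helper.
const isBedrockAnthropicModel = (model: string): boolean => model.startsWith('anthropic.');

function pickClient(model: string): 'bedrock_anthropic' | 'bedrock' {
  // Anthropic-on-Bedrock reuses parseAnthropicStream/parseAnthropicResponse;
  // all other models go through ConverseCommand / ConverseStreamCommand.
  return isBedrockAnthropicModel(model) ? 'bedrock_anthropic' : 'bedrock';
}

console.log(pickClient('anthropic.claude-3-5-sonnet')); // -> 'bedrock_anthropic'
```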
@@ -1,21 +1,17 @@
 import { type Response } from 'express';
 import OpenAI from 'openai';
+import { getOpenAIApiArgs, parseOpenAIResponse, parseOpenAIStream } from 'quadratic-api/src/ai/helpers/openai.helper';
+import { OPENAI_API_KEY } from 'quadratic-api/src/env-vars';
 import { getModelOptions } from 'quadratic-shared/ai/helpers/model.helper';
-import {
-  type AIAutoCompleteRequestBody,
-  type AIMessagePrompt,
-  type OpenAIModel,
-} from 'quadratic-shared/typesAndSchemasAI';
-import { OPENAI_API_KEY } from '../../env-vars';
-import { getOpenAIApiArgs, parseOpenAIResponse, parseOpenAIStream } from './helpers/openai.helper';
+import type { AIMessagePrompt, AIRequestBody, OpenAIModel } from 'quadratic-shared/typesAndSchemasAI';

 const openai = new OpenAI({
   apiKey: OPENAI_API_KEY || '',
 });

 export const handleOpenAIRequest = async (
   model: OpenAIModel,
-  args: Omit<AIAutoCompleteRequestBody, 'model'>,
+  args: Omit<AIRequestBody, 'model'>,
   response: Response
 ): Promise<AIMessagePrompt | undefined> => {
   const { messages, tools, tool_choice } = getOpenAIApiArgs(args);
@@ -36,7 +32,7 @@ export const handleOpenAIRequest = async (
     response.setHeader('Cache-Control', 'no-cache');
     response.setHeader('Connection', 'keep-alive');

-    const responseMessage = await parseOpenAIStream(completion, response);
+    const responseMessage = await parseOpenAIStream(completion, response, model);
     return responseMessage;
   } catch (error: any) {
     if (!response.headersSent) {
@@ -60,7 +56,7 @@ export const handleOpenAIRequest = async (
       tools,
       tool_choice,
     });
-    const responseMessage = parseOpenAIResponse(result, response);
+    const responseMessage = parseOpenAIResponse(result, response, model);
     return responseMessage;
   } catch (error: any) {
     if (error instanceof OpenAI.APIError) {
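Every handler and arg-builder in this commit takes Omit<AIRequestBody, 'model'>: the model is split off once at the routing layer and passed as its own parameter. A tiny illustration of that split, with a simplified request body (the real AIRequestBody in quadratic-shared/typesAndSchemasAI has more fields):

```ts
// Simplified request body for illustration; useTools and toolName mirror the
// destructuring seen in the get*ApiArgs helpers below.
interface AIRequestBody {
  model: string;
  messages: unknown[];
  useTools?: boolean;
}

function route(body: AIRequestBody) {
  // Peel the model off once; everything downstream receives
  // Omit<AIRequestBody, 'model'> plus the model as a separate argument.
  const { model, ...args } = body;
  return { model, args };
}
```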
@@ -5,17 +5,17 @@ import { getSystemPromptMessages } from 'quadratic-shared/ai/helpers/message.hel
 import type { AITool } from 'quadratic-shared/ai/specs/aiToolsSpec';
 import { aiToolsSpec } from 'quadratic-shared/ai/specs/aiToolsSpec';
 import type {
-  AIAutoCompleteRequestBody,
   AIMessagePrompt,
-  AnthropicAutoCompleteRequestBody,
+  AIRequestBody,
+  AnthropicModel,
   AnthropicPromptMessage,
+  AnthropicRequestBody,
   AnthropicTool,
   AnthropicToolChoice,
+  BedrockAnthropicModel,
 } from 'quadratic-shared/typesAndSchemasAI';

-export function getAnthropicApiArgs(
-  args: Omit<AIAutoCompleteRequestBody, 'model'>
-): Omit<AnthropicAutoCompleteRequestBody, 'model'> {
+export function getAnthropicApiArgs(args: Omit<AIRequestBody, 'model'>): Omit<AnthropicRequestBody, 'model'> {
   const { messages: chatMessages, useTools, toolName } = args;

   const { systemMessages, promptMessages } = getSystemPromptMessages(chatMessages);
@@ -116,13 +116,15 @@ function getAnthropicToolChoice(useTools?: boolean, name?: AITool): AnthropicToo

 export async function parseAnthropicStream(
   chunks: Stream<Anthropic.Messages.RawMessageStreamEvent>,
-  response: Response
+  response: Response,
+  model: AnthropicModel | BedrockAnthropicModel
 ) {
   const responseMessage: AIMessagePrompt = {
     role: 'assistant',
     content: '',
     contextType: 'userPrompt',
     toolCalls: [],
+    model,
   };

   for await (const chunk of chunks) {
@@ -186,12 +188,17 @@ export async function parseAnthropicStream(
   return responseMessage;
 }

-export function parseAnthropicResponse(result: Anthropic.Messages.Message, response: Response): AIMessagePrompt {
+export function parseAnthropicResponse(
+  result: Anthropic.Messages.Message,
+  response: Response,
+  model: AnthropicModel | BedrockAnthropicModel
+): AIMessagePrompt {
   const responseMessage: AIMessagePrompt = {
     role: 'assistant',
     content: '',
     contextType: 'userPrompt',
     toolCalls: [],
+    model,
   };

   result.content?.forEach(
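Both parse*Stream helpers follow the same accumulator shape: start from an empty assistant message, now stamped with the model, and fold streamed chunks into it. A reduced sketch, with a hypothetical chunk type standing in for the SDK's RawMessageStreamEvent (which also carries tool-use deltas):

```ts
// TextChunk is a hypothetical stand-in for the SDK's stream event types.
interface TextChunk {
  text: string;
}

async function accumulate(chunks: AsyncIterable<TextChunk>, model: string) {
  const responseMessage = {
    role: 'assistant' as const,
    content: '',
    contextType: 'userPrompt' as const,
    toolCalls: [] as unknown[],
    model, // stamped once, up front
  };
  for await (const chunk of chunks) {
    responseMessage.content += chunk.text; // fold each streamed delta in
  }
  return responseMessage;
}
```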
@@ -4,17 +4,16 @@ import { getSystemPromptMessages } from 'quadratic-shared/ai/helpers/message.hel
 import type { AITool } from 'quadratic-shared/ai/specs/aiToolsSpec';
 import { aiToolsSpec } from 'quadratic-shared/ai/specs/aiToolsSpec';
 import type {
-  AIAutoCompleteRequestBody,
   AIMessagePrompt,
-  BedrockAutoCompleteRequestBody,
+  AIRequestBody,
+  BedrockModel,
   BedrockPromptMessage,
+  BedrockRequestBody,
   BedrockTool,
   BedrockToolChoice,
 } from 'quadratic-shared/typesAndSchemasAI';

-export function getBedrockApiArgs(
-  args: Omit<AIAutoCompleteRequestBody, 'model'>
-): Omit<BedrockAutoCompleteRequestBody, 'model'> {
+export function getBedrockApiArgs(args: Omit<AIRequestBody, 'model'>): Omit<BedrockRequestBody, 'model'> {
   const { messages: chatMessages, useTools, toolName } = args;

   const { systemMessages, promptMessages } = getSystemPromptMessages(chatMessages);
@@ -114,12 +113,17 @@ function getBedrockToolChoice(useTools?: boolean, name?: AITool): BedrockToolCho
   return toolChoice;
 }

-export async function parseBedrockStream(chunks: AsyncIterable<ConverseStreamOutput> | never[], response: Response) {
+export async function parseBedrockStream(
+  chunks: AsyncIterable<ConverseStreamOutput> | never[],
+  response: Response,
+  model: BedrockModel
+) {
   const responseMessage: AIMessagePrompt = {
     role: 'assistant',
     content: '',
     contextType: 'userPrompt',
     toolCalls: [],
+    model,
   };

   for await (const chunk of chunks) {
@@ -185,12 +189,17 @@ export async function parseBedrockStream(chunks: AsyncIterable<ConverseStreamOut
   return responseMessage;
 }

-export function parseBedrockResponse(result: ConverseOutput | undefined, response: Response): AIMessagePrompt {
+export function parseBedrockResponse(
+  result: ConverseOutput | undefined,
+  response: Response,
+  model: BedrockModel
+): AIMessagePrompt {
   const responseMessage: AIMessagePrompt = {
     role: 'assistant',
     content: '',
     contextType: 'userPrompt',
     toolCalls: [],
+    model,
   };

   result?.message?.content?.forEach((contentBlock) => {
@@ -1,10 +1,10 @@
+import { ConnectionDocs } from 'quadratic-api/src/ai/docs/ConnectionDocs';
+import { FormulaDocs } from 'quadratic-api/src/ai/docs/FormulaDocs';
+import { JavascriptDocs } from 'quadratic-api/src/ai/docs/JavascriptDocs';
+import { PythonDocs } from 'quadratic-api/src/ai/docs/PythonDocs';
+import { QuadraticDocs } from 'quadratic-api/src/ai/docs/QuadraticDocs';
 import { aiToolsSpec } from 'quadratic-shared/ai/specs/aiToolsSpec';
 import type { ChatMessage, CodeCellType } from 'quadratic-shared/typesAndSchemasAI';
-import { ConnectionDocs } from '../docs/ConnectionDocs';
-import { FormulaDocs } from '../docs/FormulaDocs';
-import { JavascriptDocs } from '../docs/JavascriptDocs';
-import { PythonDocs } from '../docs/PythonDocs';
-import { QuadraticDocs } from '../docs/QuadraticDocs';

 export const getQuadraticContext = (language?: CodeCellType): ChatMessage[] => [
   {
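This helper's only change is the switch from relative to absolute quadratic-api/src imports; it still assembles the documentation strings into ChatMessage context entries. A rough sketch of that assembly, with a simplified message shape and a hypothetical contextType value:

```ts
// Simplified shapes; the real ChatMessage and CodeCellType come from
// quadratic-shared/typesAndSchemasAI, and 'quadraticDocs' is a hypothetical
// contextType value used only for illustration.
interface ChatMessage {
  role: 'user' | 'assistant';
  content: string;
  contextType: string;
}

const QuadraticDocs = '...'; // stand-in for the imported docs string

const getQuadraticContext = (): ChatMessage[] => [
  {
    role: 'user',
    content: `Use the following docs when working in Quadratic:\n${QuadraticDocs}`,
    contextType: 'quadraticDocs',
  },
];
```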
@@ -5,17 +5,16 @@ import { getSystemPromptMessages } from 'quadratic-shared/ai/helpers/message.hel
 import type { AITool } from 'quadratic-shared/ai/specs/aiToolsSpec';
 import { aiToolsSpec } from 'quadratic-shared/ai/specs/aiToolsSpec';
 import type {
-  AIAutoCompleteRequestBody,
   AIMessagePrompt,
-  OpenAIAutoCompleteRequestBody,
+  AIRequestBody,
+  OpenAIModel,
   OpenAIPromptMessage,
+  OpenAIRequestBody,
   OpenAITool,
   OpenAIToolChoice,
 } from 'quadratic-shared/typesAndSchemasAI';

-export function getOpenAIApiArgs(
-  args: Omit<AIAutoCompleteRequestBody, 'model'>
-): Omit<OpenAIAutoCompleteRequestBody, 'model'> {
+export function getOpenAIApiArgs(args: Omit<AIRequestBody, 'model'>): Omit<OpenAIRequestBody, 'model'> {
   const { messages: chatMessages, useTools, toolName } = args;

   const { systemMessages, promptMessages } = getSystemPromptMessages(chatMessages);
@@ -108,13 +107,15 @@ function getOpenAIToolChoice(useTools?: boolean, name?: AITool): OpenAIToolChoic

 export async function parseOpenAIStream(
   chunks: Stream<OpenAI.Chat.Completions.ChatCompletionChunk>,
-  response: Response
+  response: Response,
+  model: OpenAIModel
 ) {
   const responseMessage: AIMessagePrompt = {
     role: 'assistant',
     content: '',
     contextType: 'userPrompt',
     toolCalls: [],
+    model,
   };

   for await (const chunk of chunks) {
@@ -193,13 +194,15 @@ export async function parseOpenAIStream(

 export function parseOpenAIResponse(
   result: OpenAI.Chat.Completions.ChatCompletion,
-  response: Response
+  response: Response,
+  model: OpenAIModel
 ): AIMessagePrompt {
   const responseMessage: AIMessagePrompt = {
     role: 'assistant',
     content: '',
     contextType: 'userPrompt',
     toolCalls: [],
+    model,
   };

   const message = result.choices[0].message;
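Because every AIMessagePrompt now carries its model, downstream code can label replies per model without re-threading request state. A hypothetical consumer, with types simplified:

```ts
// Hypothetical consumer of the new field; the real AIMessagePrompt has more
// fields (contextType, toolCalls, ...).
interface AIMessagePrompt {
  role: 'assistant';
  content: string;
  model: string;
}

function describe(message: AIMessagePrompt): string {
  // The model travels with the message, so no extra lookup is needed.
  return `[${message.model}] ${message.content}`;
}

console.log(describe({ role: 'assistant', content: 'done', model: 'gpt-4o' }));
```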
@@ -1,6 +1,6 @@
 import rateLimit from 'express-rate-limit';
-import { RATE_LIMIT_AI_REQUESTS_MAX, RATE_LIMIT_AI_WINDOW_MS } from '../../env-vars';
-import type { Request } from '../../types/Request';
+import { RATE_LIMIT_AI_REQUESTS_MAX, RATE_LIMIT_AI_WINDOW_MS } from 'quadratic-api/src/env-vars';
+import type { Request } from 'quadratic-api/src/types/Request';

 export const ai_rate_limiter = rateLimit({
   windowMs: Number(RATE_LIMIT_AI_WINDOW_MS) || 3 * 60 * 60 * 1000, // 3 hours
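For reference, a minimal express-rate-limit configuration in this shape; the request cap here is an assumed placeholder, since only the windowMs line is visible in this hunk and the real limiter reads RATE_LIMIT_AI_REQUESTS_MAX from quadratic-api/src/env-vars:

```ts
import rateLimit from 'express-rate-limit';

// Sketch only: 25 requests per 3-hour window is an assumed cap, and the real
// limiter may key requests differently (e.g. per user rather than per IP).
export const example_ai_rate_limiter = rateLimit({
  windowMs: 3 * 60 * 60 * 1000, // 3-hour window, matching the default above
  max: 25, // assumed cap; the real value comes from RATE_LIMIT_AI_REQUESTS_MAX
  standardHeaders: true, // expose RateLimit-* headers to clients
});
```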
7 changes: 2 additions & 5 deletions quadratic-api/src/app.ts
@@ -7,10 +7,9 @@ import 'express-async-errors';
 import fs from 'fs';
 import helmet from 'helmet';
 import path from 'path';
+import internal_router from 'quadratic-api/src/routes/internal';
+import { ApiError } from 'quadratic-api/src/utils/ApiError';
 import { CORS, NODE_ENV, SENTRY_DSN } from './env-vars';
-import ai_router from './routes/ai/ai';
-import internal_router from './routes/internal';
-import { ApiError } from './utils/ApiError';
 export const app = express();

 // Configure Sentry
@@ -68,8 +67,6 @@ app.get('/', (req, res) => {
 });

 // App routes
-// TODO: eventually move all of these into the `v0` directory and register them dynamically
-app.use('/ai', ai_router);
 // Internal routes
 app.use('/v0/internal', internal_router);
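Removing app.use('/ai', ai_router) resolves the old TODO; per the commit message, the AI router now lives with the v0 routes. Its new mount point is not visible in the hunks shown here, so the following sketch of the presumed registration is an assumption:

```ts
import express, { Router } from 'express';

// Hypothetical stand-in for the relocated AI router; the real module path
// after this commit is not shown in the hunks above.
const ai_router = Router();
const internal_router = Router(); // stand-in for quadratic-api/src/routes/internal

const app = express();
app.use('/v0/ai', ai_router); // assumed v0 mount point, inferred from the commit message
app.use('/v0/internal', internal_router); // internal routes already live under /v0
```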