chat: smoother provider gemini (fixes #8187) #8188

Merged · 3 commits · Feb 5, 2025
chatapi/package.json (1 change: 0 additions & 1 deletion)

@@ -32,7 +32,6 @@
     "@types/express": "^4.17.17",
     "@types/ws": "^8.5.6",
     "@types/textract": "^2.4.5",
-    "@google/generative-ai": "^0.3.0",
     "cors": "^2.8.5",
     "dotenv": "^16.1.4",
     "express": "^4.18.2",
chatapi/src/config/ai-providers.config.ts (8 changes: 5 additions & 3 deletions)

@@ -1,9 +1,8 @@
 /* eslint-disable no-console */
-import { GoogleGenerativeAI } from '@google/generative-ai';
 import OpenAI from 'openai';
 
 import { configurationDB } from './nano.config';
-import { ModelsDocument } from '../models/ai-providers.model';
+import { ModelsDocument } from '../models/chat.model';
 
 let keys: Record<string, any> = {};
 let models: Record<string, any> = {};

@@ -42,7 +41,10 @@ const initialize = async () => {
       'apiKey': doc?.keys.deepseek || '',
       'baseURL': 'https://api.deepseek.com',
     }),
-    'gemini': new GoogleGenerativeAI(doc?.keys.gemini || '')
+    'gemini': new OpenAI({
+      'apiKey': doc?.keys.gemini || '',
+      'baseURL': 'https://generativelanguage.googleapis.com/v1beta/openai/',
+    })
   };
 
   models = {
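
For reviewers who want to poke at the new wiring in isolation, here is a minimal sketch of what this config change amounts to: the stock openai SDK pointed at Gemini's OpenAI-compatible endpoint, so Gemini traffic goes through the same chat.completions API as every other provider. The model name gemini-2.0-flash and the GEMINI_API_KEY environment variable are placeholders, not something this PR introduces.

```ts
// Sketch only: exercising a Gemini key through the OpenAI SDK, mirroring the
// new ai-providers.config.ts setup. Model name and env var are placeholders.
import OpenAI from 'openai';

const gemini = new OpenAI({
  apiKey: process.env.GEMINI_API_KEY || '',
  baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
});

async function main() {
  const completion = await gemini.chat.completions.create({
    model: 'gemini-2.0-flash',
    messages: [{ role: 'user', content: 'Say hello from the chatapi.' }],
  });
  console.log(completion.choices[0]?.message?.content);
}

main().catch(console.error);
```
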
chatapi/src/models/chat-item.model.ts (4 changes: 0 additions & 4 deletions)

This file was deleted.

chatapi/src/models/chat-message.model.ts (9 changes: 0 additions & 9 deletions)

This file was deleted.

chatapi/src/models/ai-providers.model.ts → chatapi/src/models/chat.model.ts (renamed)

@@ -1,24 +1,35 @@
-type ProviderName = 'openai' | 'perplexity' | 'gemini';
+type ProviderName = 'openai' | 'perplexity' | 'deepseek' | 'gemini';
 
 export interface AIProvider {
   name: ProviderName;
   model?: string;
 }
 
-interface Assistant {
-  name: string;
-  instructions: string;
-}
-
 interface Providers {
   openai?: string;
   perplexity?: string;
   deepseek?: string;
   gemini?: string;
 }
 
+interface Assistant {
+  name: string;
+  instructions: string;
+}
+
 export interface ModelsDocument {
   models: Providers;
   keys: Providers;
   assistant?: Assistant;
 }
+
+export interface ChatMessage {
+  role: 'user' | 'assistant';
+  content: string;
+}
+
+export interface ChatItem {
+  query: string;
+  response: string;
+}

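A small usage sketch of the consolidated model file (illustrative, not part of the diff): provider, message, and history types now come from one chat.model import instead of the deleted chat-message.model and chat-item.model files. The relative import path and the model name are assumptions.

```ts
// Illustrative only: a single import now covers the chat-related types.
import { AIProvider, ChatMessage, ChatItem } from '../models/chat.model';

const aiProvider: AIProvider = { name: 'gemini', model: 'gemini-2.0-flash' };

const messages: ChatMessage[] = [
  { role: 'user', content: 'Summarize the latest course feedback.' },
];

const historyEntry: ChatItem = {
  query: messages[0].content,
  response: '(filled in once the provider replies)',
};
```
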
chatapi/src/services/chat.service.ts (9 changes: 2 additions & 7 deletions)
@@ -1,11 +1,10 @@
 import { DocumentInsertResponse } from 'nano';
 
 import { chatDB } from '../config/nano.config';
-import { aiChat } from '../utils/chat.utils';
 import { retrieveChatHistory } from '../utils/db.utils';
+import { aiChat } from '../utils/chat.utils';
 import { handleChatError } from '../utils/chat-error.utils';
-import { AIProvider } from '../models/ai-providers.model';
-import { ChatMessage } from '../models/chat-message.model';
+import { AIProvider, ChatMessage } from '../models/chat.model';
 
 /**
  * Create a chat conversation & save in couchdb
@@ -26,10 +25,6 @@ export async function chat(data: any, stream?: boolean, callback?: (response: st
     throw new Error('"data.content" is a required non-empty string field');
   }
 
-  if (stream && aiProvider.name === 'gemini') {
-    throw new Error('Streaming not supported on Gemini');
-  }
-
   if (dbData._id) {
     await retrieveChatHistory(dbData, messages);
   } else {
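
With the Gemini-specific guard gone, a streaming request takes the same path as any other provider. Below is a hypothetical call against the service; the chat(data, stream, callback) signature comes from the diff above, while the payload fields beyond content are assumptions based on the surrounding code.

```ts
// Hypothetical call sketch: streaming a Gemini conversation through the shared path.
// Payload fields other than `content` are assumed; the return value is not inspected here.
import { chat } from '../services/chat.service';

async function demo() {
  await chat(
    { content: 'Explain CouchDB views in two sentences.', aiProvider: { name: 'gemini' } },
    true,                                    // stream: previously rejected for Gemini
    (delta) => process.stdout.write(delta)   // receives incremental chunks
  );
}

demo().catch(console.error);
```
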
chatapi/src/utils/chat-helpers.utils.ts (99 changes: 25 additions & 74 deletions)
@@ -1,7 +1,6 @@
 import { keys } from '../config/ai-providers.config';
 import { models } from '../config/ai-providers.config';
-import { AIProvider } from '../models/ai-providers.model';
-import { ChatMessage, GeminiMessage } from '../models/chat-message.model';
+import { AIProvider, ChatMessage } from '../models/chat.model';
 import { Attachment } from '../models/db-doc.model';
 import { fetchFileFromCouchDB } from './db.utils';
 import {
@@ -15,42 +14,6 @@ import {
 } from './chat-assistant.utils';
 import { extractTextFromDocument } from './text-extraction.utils';
 
-/**
- * Uses geminis's multimodal endpoint to generate chat completions
- * @param messages - Array of chat messages
- * @param model - Gemini model to use for completions
- * @returns Completion text
- */
-async function handleGemini(
-  messages: ChatMessage[],
-  model: string
-): Promise<string> {
-  const geminiModel = keys.gemini.getGenerativeModel({ model });
-
-  const msg = messages[messages.length - 1].content;
-
-  const geminiMessages: GeminiMessage[] = messages.map((message) => ({
-    'role': message.role === 'assistant' ? 'model' : message.role,
-    'parts': [ { 'text': message.content } ],
-  }));
-
-  geminiMessages.pop();
-
-  const chat = geminiModel.startChat({
-    'history': geminiMessages,
-    'generationConfig': {
-      'maxOutputTokens': 100,
-    },
-  });
-
-  const result = await chat.sendMessage(msg);
-  const response = await result.response;
-  const completionText = response.text();
-
-  return completionText;
-}
-
 /**
  * Uses openai's completions endpoint to generate chat completions with streaming enabled
  * @param messages - Array of chat messages
@@ -85,30 +48,24 @@ export async function aiChatStream(
     }
   }
 
-  if (aiProvider.name === 'gemini') {
-    return handleGemini(messages, model);
-  } else if ('chat' in provider.ai) {
-    const completion = await provider.ai.chat.completions.create({
-      model,
-      messages,
-      'stream': true,
-    });
-
-    let completionText = '';
-    for await (const chunk of completion) {
-      if (chunk.choices && chunk.choices.length > 0) {
-        const response = chunk.choices[0].delta?.content || '';
-        completionText += response;
-        if (callback) {
-          callback(response);
-        }
-      }
-    }
-
-    return completionText;
-  } else {
-    throw new Error('Provider does not support chat completions');
-  }
+  const completion = await provider.ai.chat.completions.create({
+    model,
+    messages,
+    'stream': true,
+  });
+
+  let completionText = '';
+  for await (const chunk of completion) {
+    if (chunk.choices && chunk.choices.length > 0) {
+      const response = chunk.choices[0].delta?.content || '';
+      completionText += response;
+      if (callback) {
+        callback(response);
+      }
+    }
+  }
+
+  return completionText;
 }


@@ -159,21 +116,15 @@ export async function aiChatNonStream(
     }
   }
 
-  if (aiProvider.name === 'gemini') {
-    return handleGemini(messages, model);
-  } else if ('chat' in provider.ai) {
-    const completion = await provider.ai.chat.completions.create({
-      model,
-      messages,
-    });
-
-    const completionText = completion.choices[0]?.message?.content;
-    if (!completionText) {
-      throw new Error('Unexpected API response');
-    }
-
-    return completionText;
-  } else {
-    throw new Error('Provider does not support chat completions');
-  }
+  const completion = await provider.ai.chat.completions.create({
+    model,
+    messages,
+  });
+
+  const completionText = completion.choices[0]?.message?.content;
+  if (!completionText) {
+    throw new Error('Unexpected API response');
+  }
+
+  return completionText;
 }
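
The loop both helpers now share is plain OpenAI SDK streaming, so it behaves the same once the client is built against the Gemini baseURL from ai-providers.config.ts. A standalone sketch of that pattern follows, with the client construction and model name assumed as in the earlier sketch.

```ts
// Standalone sketch of the shared streaming pattern (assumed client and model names).
import OpenAI from 'openai';

const client = new OpenAI({
  apiKey: process.env.GEMINI_API_KEY || '',
  baseURL: 'https://generativelanguage.googleapis.com/v1beta/openai/',
});

async function streamDemo() {
  const completion = await client.chat.completions.create({
    model: 'gemini-2.0-flash',
    messages: [{ role: 'user', content: 'Stream a haiku about mesh networks.' }],
    stream: true,
  });

  let completionText = '';
  for await (const chunk of completion) {
    const delta = chunk.choices[0]?.delta?.content || '';
    completionText += delta;
    process.stdout.write(delta);
  }
  return completionText;
}

streamDemo().catch(console.error);
```
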
chatapi/src/utils/chat.utils.ts (3 changes: 1 addition & 2 deletions)

@@ -1,7 +1,6 @@
 import { aiChatStream, aiChatNonStream } from './chat-helpers.utils';
 
-import { AIProvider } from '../models/ai-providers.model';
-import { ChatMessage } from '../models/chat-message.model';
+import { AIProvider, ChatMessage } from '../models/chat.model';
 
 export async function aiChat(
   messages: ChatMessage[],
chatapi/src/utils/db.utils.ts (2 changes: 1 addition & 1 deletion)

@@ -1,6 +1,6 @@
 import { chatDB, resourceDB } from '../config/nano.config';
 import { DbDoc } from '../models/db-doc.model';
-import { ChatMessage } from '../models/chat-message.model';
+import { ChatMessage } from '../models/chat.model';
 
 /**
  * Retrieves chat history from CouchDB for a given document ID.
package.json (2 changes: 1 addition & 1 deletion)

@@ -1,7 +1,7 @@
 {
   "name": "planet",
   "license": "AGPL-3.0",
-  "version": "0.16.96",
+  "version": "0.16.97",
   "myplanet": {
     "latest": "v0.22.75",
     "min": "v0.21.75"