Skip to content
This repository was archived by the owner on Sep 15, 2024. It is now read-only.

Refactor ChatGPT LLMApi #120

Merged
merged 7 commits into from
Nov 18, 2023
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Chore ChatGPT LLMApi
[+] chore(openai.ts): add getServiceProvider() function to retrieve the service provider
[+] chore(openai.ts): update console.log statements to include the service provider information
  • Loading branch information
H0llyW00dzZ committed Nov 18, 2023
commit 98bf064a64d9177093c7c098fd10bd45fc7403e4
34 changes: 24 additions & 10 deletions app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,26 @@ export class ChatGPTApi implements LLMApi {
}
}


/**
 * Resolves the currently configured service provider to its display name.
 *
 * Centralizes the provider check in one place so callers (e.g. request
 * logging) don't each repeat the Azure/OpenAI comparison.
 * Author: @H0llyW00dzZ
 *
 * @returns "Azure" or "OpenAI" for a recognized provider, or an empty
 *          string when the configured provider matches neither.
 */
private getServiceProvider(): string {
  const { provider } = useAccessStore.getState();
  switch (provider) {
    case ServiceProvider.Azure:
      return "Azure";
    case ServiceProvider.OpenAI:
      return "OpenAI";
    default:
      // Unknown/unsupported provider — callers treat "" as "no label".
      return "";
  }
}

async chat(options: ChatOptions) {
const textmoderation = useAppConfig.getState().textmoderation;
const latest = OpenaiPath.TextModerationModels.latest;
Expand Down Expand Up @@ -227,18 +247,19 @@ export class ChatGPTApi implements LLMApi {
* Author : @H0llyW00dzZ
**/
const magicPayload = this.getNewStuff(defaultModel);
const provider = this.getServiceProvider();

if (defaultModel.startsWith("dall-e")) {
console.log("[Request] openai payload: ", {
console.log(`[Request] ${provider} payload: `, {
image: requestPayloads.image,
});
} else if (magicPayload.isNewModel) {
console.log("[Request] openai payload: ", {
console.log(`[Request] ${provider} payload: `, {
chat: requestPayloads.chat,
});
} else {
const { max_tokens, ...oldChatPayload } = requestPayloads.chat;
console.log("[Request] openai payload: ", {
console.log(`[Request] ${provider} payload: `, {
chat: oldChatPayload,
});
}
Expand Down Expand Up @@ -311,13 +332,6 @@ export class ChatGPTApi implements LLMApi {
async onopen(res) {
clearTimeout(requestTimeoutId);
const contentType = res.headers.get("content-type");
const accessStore = useAccessStore.getState();
let provider = "";
if (accessStore.provider === ServiceProvider.Azure) {
provider = "Azure";
} else if (accessStore.provider === ServiceProvider.OpenAI) {
provider = "OpenAI";
}
console.log(`[ServiceProvider] [${provider}] request response content type: `, contentType);

if (contentType?.startsWith("text/plain")) {
Expand Down