Skip to content
This repository was archived by the owner on Sep 15, 2024. It is now read-only.

Commit

Permalink
Simplify [LLM Api] OpenAI (#272)
Browse files Browse the repository at this point in the history
* Refactor [LLM Api] [OpenAI] simplify JSON response

- [+] refactor(openai.ts): simplify JSON response handling and error handling in ChatGPTApi class

* Remove custom models to avoid a potential plagiarism claim

- [+] refactor(app/constant.ts): remove dall-e-2-beta-instruct-vision and dall-e-3-beta-instruct-vision from DEFAULT_MODELS array

* Fix JSON Response in DALL-E Models

- [+] refactor(openai.ts): remove unused variables and simplify response processing
- [+] style(openai.ts): remove trailing spaces in if condition
- [+] fix(openai.ts): add return statement to fix JSON response handling
  • Loading branch information
H0llyW00dzZ authored Feb 10, 2024
1 parent 0370604 commit c442418
Show file tree
Hide file tree
Showing 2 changed files with 23 additions and 120 deletions.
125 changes: 23 additions & 102 deletions app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -293,9 +293,6 @@ export class ChatGPTApi implements LLMApi {

controller.signal.onabort = finish;

const isApp = !!getClientConfig()?.isApp;
const apiPath = "api/openai/";

fetchEventSource(chatPath, {
...chatPayload,
async onopen(res) {
Expand All @@ -305,119 +302,43 @@ export class ChatGPTApi implements LLMApi {

if (contentType?.startsWith("text/plain")) {
responseText = await res.clone().text();
} else if (contentType?.startsWith("application/json")
&& magicPayload.isDalle) { // only dall-e
} else if (contentType?.startsWith("application/json")) {
const jsonResponse = await res.clone().json();
const imageUrl = jsonResponse.data?.[0]?.url;
const prompt = requestPayloads.image.prompt;
const revised_prompt = jsonResponse.data?.[0]?.revised_prompt;
const index = requestPayloads.image.n - 1;
const size = requestPayloads.image.size;
const InstrucModel = defaultModel.endsWith("-vision");

if (defaultModel.includes("dall-e-3")) {
const imageDescription = `| ![${revised_prompt}](${imageUrl}) |\n|---|\n| Size: ${size} |\n| [Download Here](${imageUrl}) |\n| 🎩 🪄 Revised Prompt (${index + 1}): ${revised_prompt} |\n| 🤖 AI Models: ${defaultModel} |`;

responseText = `${imageDescription}`;
} else {
const imageDescription = `#### ${prompt} (${index + 1})\n\n\n | ![${imageUrl}](${imageUrl}) |\n|---|\n| Size: ${size} |\n| [Download Here](${imageUrl}) |\n| 🤖 AI Models: ${defaultModel} |`;

responseText = `${imageDescription}`;
}

if (InstrucModel) {
const instructx = await fetch(
(isApp ? DEFAULT_API_HOST : apiPath) + OpenaiPath.ChatPath, // Pass the path parameter
{
method: "POST",
body: JSON.stringify({
messages: [
...messages,
],
model: "gpt-4-vision-preview",
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
frequency_penalty: modelConfig.frequency_penalty,
top_p: modelConfig.top_p,
// have to add this max_tokens for dall-e instruct
max_tokens: modelConfig.max_tokens,
}),
headers: getHeaders(),
// Generic JSON response handling
if (jsonResponse.data) {
if (magicPayload.isDalle) {
// Specific handling for DALL·E responses
const imageUrl = jsonResponse.data?.[0]?.url;
const prompt = requestPayloads.image.prompt;
const revised_prompt = jsonResponse.data?.[0]?.revised_prompt;
const index = requestPayloads.image.n - 1;
const size = requestPayloads.image.size;


let imageDescription = `#### ${prompt} (${index + 1})\n\n\n | ![${imageUrl}](${imageUrl}) |\n|---|\n| Size: ${size} |\n| [Download Here](${imageUrl}) |\n| 🤖 AI Models: ${defaultModel} |`;
if (defaultModel.includes("dall-e-3")) {
imageDescription = `| ![${revised_prompt}](${imageUrl}) |\n|---|\n| Size: ${size} |\n| [Download Here](${imageUrl}) |\n| 🎩 🪄 Revised Prompt (${index + 1}): ${revised_prompt} |\n| 🤖 AI Models: ${defaultModel} |`;
}
);
clearTimeout(requestTimeoutId);
const instructxx = await instructx.json();

const instructionDelta = instructxx.choices?.[0]?.message?.content;
const instructionPayload = {
messages: [
...messages,
{
role: "system",
content: instructionDelta,
},
],
model: "gpt-4-vision-preview",
temperature: modelConfig.temperature,
presence_penalty: modelConfig.presence_penalty,
frequency_penalty: modelConfig.frequency_penalty,
top_p: modelConfig.top_p,
max_tokens: modelConfig.max_tokens,
};

const instructionResponse = await fetch(
(isApp ? DEFAULT_API_HOST : apiPath) + OpenaiPath.ChatPath,
{
method: "POST",
body: JSON.stringify(instructionPayload),
headers: getHeaders(),
}
);

const instructionJson = await instructionResponse.json();
const instructionMessage = instructionJson.choices?.[0]?.message?.content; // Access the appropriate property containing the message
const imageDescription = `| ![${prompt}](${imageUrl}) |\n|---|\n| Size: ${size} |\n| [Download Here](${imageUrl}) |\n| 🤖 AI Models: ${defaultModel} |`;

responseText = `${imageDescription}\n\n${instructionMessage}`;
}

if (
!res.ok ||
!res.headers
.get("content-type")
?.startsWith(EventStreamContentType) ||
res.status !== 200
) {
let anyinfo = await res.clone().text();
try {
const infJson = await res.clone().json();
anyinfo = prettyObject(infJson);
} catch { }
if (res.status === 401) {
responseText = "\n\n" + Locale.Error.Unauthorized;
responseText = `${imageDescription}`;
}
if (res.status !== 200) {
if (anyinfo) {
responseText += "\n\n" + anyinfo;
}
}
return;
return; // this should be fix json response, unlike go so easy
}
}
// Handle non-OK responses or unexpected content types
if (
!res.ok ||
!res.headers
.get("content-type")
?.startsWith(EventStreamContentType) ||
res.status !== 200
) {
const responseTexts = [responseText];
let extraInfo = await res.clone().text();
try {
const resJson = await res.clone().json();
extraInfo = prettyObject(resJson);
} catch {}
} catch { }

const responseTexts = [responseText];
if (res.status === 401) {
responseTexts.push(Locale.Error.Unauthorized);
}
Expand Down Expand Up @@ -446,9 +367,9 @@ export class ChatGPTApi implements LLMApi {
remainText += delta;
}

if (textmoderation
&& textmoderation.length > 0
&& provider === ServiceProvider.Azure) {
if (textmoderation
&& textmoderation.length > 0
&& provider === ServiceProvider.Azure) {
const contentFilterResults = textmoderation?.[0]?.content_filter_results;
console.log(`[${provider}] [Text Moderation] flagged categories result:`, contentFilterResults);
}
Expand Down
18 changes: 0 additions & 18 deletions app/constant.ts
Original file line number Diff line number Diff line change
Expand Up @@ -153,24 +153,6 @@ export const DEFAULT_MODELS = [
providerType: "openai",
},
},
{
name: "dall-e-2-beta-instruct-vision",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "dall-e-3-beta-instruct-vision",
available: true,
provider: {
id: "openai",
providerName: "OpenAI",
providerType: "openai",
},
},
{
name: "gpt-4",
available: true,
Expand Down

0 comments on commit c442418

Please sign in to comment.