From bfd602b3697ed1a80cd41c11dfc9b0ea58d5a450 Mon Sep 17 00:00:00 2001 From: fegloff Date: Sun, 20 Aug 2023 00:05:56 -0500 Subject: [PATCH 1/4] add ask35 logic + add/delete message to end typing status --- src/bot.ts | 1 + src/modules/open-ai/api/openAi.ts | 20 +++++++++++++++++--- src/modules/open-ai/controller/index.ts | 6 +++--- src/modules/open-ai/index.ts | 19 +++++++++++++++++-- 4 files changed, 38 insertions(+), 8 deletions(-) diff --git a/src/bot.ts b/src/bot.ts index ccd52198..b5dc324d 100644 --- a/src/bot.ts +++ b/src/bot.ts @@ -342,6 +342,7 @@ bot.catch((err) => { logger.error(`Error while handling update ${ctx.update.update_id}:`); const e = err.error; if (e instanceof GrammyError) { + console.log(e) logger.error("Error in request:", e.description); } else if (e instanceof HttpError) { logger.error("Could not contact Telegram:", e); diff --git a/src/modules/open-ai/api/openAi.ts b/src/modules/open-ai/api/openAi.ts index 073e32ca..55f0a69b 100644 --- a/src/modules/open-ai/api/openAi.ts +++ b/src/modules/open-ai/api/openAi.ts @@ -161,7 +161,15 @@ export const streamChatCompletion = async ( stream: true, }; let completion = ""; - let msgId = 0; + let msgId = ( + await ctx.reply( + `_${ctx.session.openAi.chatGpt.model.toLocaleUpperCase()}_`, + { + parse_mode: "Markdown", + } + ) + ).message_id; + ctx.chatAction = "typing"; return new Promise(async (resolve, reject) => { const res = await openai.createChatCompletion( payload as CreateChatCompletionRequest, @@ -181,11 +189,17 @@ export const streamChatCompletion = async ( if (!completion.endsWith(".")) { if (msgId === 0) { msgId = (await ctx.reply(completion)).message_id; + resolve(completion); + return; } } await ctx.api .editMessageText(ctx.chat?.id!, msgId, completion) .catch((e: any) => console.log(e)); + const msgIdEnd = (await ctx.reply('_done_', { + parse_mode: "Markdown", + })).message_id; + ctx.api.deleteMessage(ctx.chat?.id!,msgIdEnd) resolve(completion); return; } @@ -197,8 +211,8 @@ export const streamChatCompletion = async ( : ""; if (parsed.choices[0].delta.content === ".") { if (msgId === 0) { - msgId = (await ctx.reply(completion)).message_id; - ctx.chatAction = "typing"; + // msgId = (await ctx.reply(completion)).message_id; + // ctx.chatAction = "typing"; } else { completion = completion.replaceAll("..", ""); completion += ".."; diff --git a/src/modules/open-ai/controller/index.ts b/src/modules/open-ai/controller/index.ts index 8c45cea1..b54ddfd2 100644 --- a/src/modules/open-ai/controller/index.ts +++ b/src/modules/open-ai/controller/index.ts @@ -140,8 +140,8 @@ export const promptGen = async (data: ChatGptPayload) => { return price } return 0 - } catch (e) { - logger.error("promptGen Error", e); - throw "There was an error while generating the image"; + } catch (e: any) { + logger.error(`promptGen Error ${e.toString()}`); + throw e; } }; diff --git a/src/modules/open-ai/index.ts b/src/modules/open-ai/index.ts index 79c5567e..230dc22a 100644 --- a/src/modules/open-ai/index.ts +++ b/src/modules/open-ai/index.ts @@ -11,6 +11,7 @@ import { alterImg, imgGen, imgGenEnhanced, promptGen } from "./controller"; import { Logger, pino } from "pino"; import { appText } from "./utils/text"; import { chatService } from "../../database/services"; +import { ChatGPTModelsEnum } from "./types"; export const SupportedCommands = { // chat: { @@ -23,6 +24,11 @@ export const SupportedCommands = { groupParams: ">0", privateParams: ">0", }, + ask35: { + name: "ask35", + groupParams: ">0", + privateParams: ">0", + }, gpt4: { name: "gpt4", 
groupParams: ">0", @@ -219,6 +225,13 @@ export class OpenAIBot { // } if (ctx.hasCommand(SupportedCommands.ask.name)) { + ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_4 + await this.onChat(ctx); + return; + } + + if (ctx.hasCommand(SupportedCommands.ask35.name)) { + ctx.session.openAi.chatGpt.model = ChatGPTModelsEnum.GPT_35_TURBO await this.onChat(ctx); return; } @@ -354,7 +367,9 @@ export class OpenAIBot { await ctx.reply(msg, { parse_mode: "Markdown" }); return; } - + // if (chatConversation.length === 0) { + // ctx.reply(`_Using model ${ctx.session.openAi.chatGpt.model}_`,{ parse_mode: "Markdown" }) + // } chatConversation.push({ role: "user", content: `${this.hasPrefix(prompt) ? prompt.slice(1) : prompt}.`, @@ -382,7 +397,7 @@ export class OpenAIBot { } } catch (error: any) { ctx.chatAction = null; - this.logger.error(error.toString()); + this.logger.error(`onChat: ${error.toString()}`); await ctx.reply("Error handling your request"); } } From 7fddbabb371fef5360578fbaee95f25a5d105195 Mon Sep 17 00:00:00 2001 From: fegloff Date: Sun, 20 Aug 2023 22:26:50 -0500 Subject: [PATCH 2/4] change that help handling list --- src/modules/open-ai/api/openAi.ts | 8 ++++++-- src/modules/payment/index.ts | 4 +++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/modules/open-ai/api/openAi.ts b/src/modules/open-ai/api/openAi.ts index 55f0a69b..b4f04cc8 100644 --- a/src/modules/open-ai/api/openAi.ts +++ b/src/modules/open-ai/api/openAi.ts @@ -175,6 +175,7 @@ export const streamChatCompletion = async ( payload as CreateChatCompletionRequest, { responseType: "stream" } ); + let wordCount = 0 //@ts-ignore res.data.on("data", async (data: any) => { const lines = data @@ -196,7 +197,7 @@ export const streamChatCompletion = async ( await ctx.api .editMessageText(ctx.chat?.id!, msgId, completion) .catch((e: any) => console.log(e)); - const msgIdEnd = (await ctx.reply('_done_', { + const msgIdEnd = (await ctx.reply(`_done with ${ctx.session.openAi.chatGpt.model.toLocaleUpperCase()}_`, { parse_mode: "Markdown", })).message_id; ctx.api.deleteMessage(ctx.chat?.id!,msgIdEnd) @@ -204,7 +205,9 @@ export const streamChatCompletion = async ( return; } try { + wordCount++ const parsed = JSON.parse(message); + // console.log(parsed.choices[0].delta.content, wordCount) completion += parsed.choices[0].delta.content !== undefined ? 
parsed.choices[0].delta.content @@ -213,9 +216,10 @@ export const streamChatCompletion = async ( if (msgId === 0) { // msgId = (await ctx.reply(completion)).message_id; // ctx.chatAction = "typing"; - } else { + } else if (wordCount > 15) { completion = completion.replaceAll("..", ""); completion += ".."; + wordCount = 0 ctx.api .editMessageText(ctx.chat?.id!, msgId, completion) .catch((e: any) => console.log(e)); diff --git a/src/modules/payment/index.ts b/src/modules/payment/index.ts index f0771cb4..b6ecb112 100644 --- a/src/modules/payment/index.ts +++ b/src/modules/payment/index.ts @@ -81,7 +81,8 @@ export class BotPayments { } public toONE(amount: BigNumber, roundCeil = true) { - const value = this.web3.utils.fromWei(amount.toFixed(), 'ether') + console.log(amount, amount.toFixed()) + const value = this.web3.utils.fromWei(amount.toFixed(0), 'ether') if(roundCeil) { return Math.ceil(+value) } @@ -346,6 +347,7 @@ export class BotPayments { const freeCredits = await chatService.getBalance(accountId) const addressBalance = await this.getAddressBalance(account.address); const balance = addressBalance.plus(freeCredits) + console.log('balance',balance) const balanceOne = this.toONE(balance, false); ctx.reply( `Your credits in ONE tokens: ${balanceOne.toFixed(2)} From 39d4c03121e631a5e25c6c47544312b95d88438d Mon Sep 17 00:00:00 2001 From: fegloff Date: Sun, 20 Aug 2023 23:35:28 -0500 Subject: [PATCH 3/4] delete completion working message and create a new message --- src/modules/open-ai/api/openAi.ts | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/src/modules/open-ai/api/openAi.ts b/src/modules/open-ai/api/openAi.ts index b4f04cc8..e638abb6 100644 --- a/src/modules/open-ai/api/openAi.ts +++ b/src/modules/open-ai/api/openAi.ts @@ -153,9 +153,10 @@ export const streamChatCompletion = async ( try { const payload = { model: model, - max_tokens: limitTokens - ? config.openAi.imageGen.completions.maxTokens - : undefined, + max_tokens: 800, + // limitTokens + // ? 
config.openAi.imageGen.completions.maxTokens + // : undefined, temperature: config.openAi.imageGen.completions.temperature, messages: conversation, stream: true, @@ -175,7 +176,7 @@ export const streamChatCompletion = async ( payload as CreateChatCompletionRequest, { responseType: "stream" } ); - let wordCount = 0 + let wordCount = 0; //@ts-ignore res.data.on("data", async (data: any) => { const lines = data @@ -197,15 +198,19 @@ export const streamChatCompletion = async ( await ctx.api .editMessageText(ctx.chat?.id!, msgId, completion) .catch((e: any) => console.log(e)); - const msgIdEnd = (await ctx.reply(`_done with ${ctx.session.openAi.chatGpt.model.toLocaleUpperCase()}_`, { - parse_mode: "Markdown", - })).message_id; - ctx.api.deleteMessage(ctx.chat?.id!,msgIdEnd) + // const msgIdEnd = ( + // await ctx.reply(`_done_`, { + // // with ${ctx.session.openAi.chatGpt.model.toLocaleUpperCase()} + // parse_mode: "Markdown", + // }) + // ).message_id; + ctx.api.deleteMessage(ctx.chat?.id!, msgId); // msgIdEnd); + ctx.reply(completion); resolve(completion); return; } try { - wordCount++ + wordCount++; const parsed = JSON.parse(message); // console.log(parsed.choices[0].delta.content, wordCount) completion += @@ -216,10 +221,10 @@ export const streamChatCompletion = async ( if (msgId === 0) { // msgId = (await ctx.reply(completion)).message_id; // ctx.chatAction = "typing"; - } else if (wordCount > 15) { + } else if (wordCount > 30) { completion = completion.replaceAll("..", ""); completion += ".."; - wordCount = 0 + wordCount = 0; ctx.api .editMessageText(ctx.chat?.id!, msgId, completion) .catch((e: any) => console.log(e)); From 6fd26f31eaf7e1499e2a30eaa3584d9127618fbd Mon Sep 17 00:00:00 2001 From: Francisco Egloff Date: Mon, 21 Aug 2023 00:07:57 -0500 Subject: [PATCH 4/4] Update index.ts --- src/modules/payment/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/modules/payment/index.ts b/src/modules/payment/index.ts index b6ecb112..08ac7b1b 100644 --- a/src/modules/payment/index.ts +++ b/src/modules/payment/index.ts @@ -82,7 +82,7 @@ export class BotPayments { public toONE(amount: BigNumber, roundCeil = true) { console.log(amount, amount.toFixed()) - const value = this.web3.utils.fromWei(amount.toFixed(0), 'ether') + const value = this.web3.utils.fromWei(amount.toFixed(), 'ether') if(roundCeil) { return Math.ceil(+value) }
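
Taken together, patches 1 through 3 settle on a single streaming pattern: reply with a placeholder message that names the model, edit that message only every N streamed chunks (first 15, later 30) instead of on every sentence boundary, and when the stream finishes delete the working message and send the completed text as a fresh message. The following is a minimal, framework-agnostic sketch of that flow for reference only; the Messenger interface, the streamWithThrottledEdits helper, and the fakeChunks generator are illustrative stand-ins, not part of the bot's actual grammY/OpenAI code, which is what the diffs above change.

// Hypothetical stand-in for the small subset of grammY's ctx/api the patch uses.
interface Messenger {
  send(text: string): Promise<number>;               // returns a message id
  edit(messageId: number, text: string): Promise<void>;
  delete(messageId: number): Promise<void>;
}

// Consume streamed chunks, editing a placeholder every `editEvery` chunks
// (patch 3 raises this from 15 to 30), then replace the in-progress message
// with a fresh final message once the stream ends, as in patch 3.
async function streamWithThrottledEdits(
  chunks: AsyncIterable<string>,
  messenger: Messenger,
  model: string,
  editEvery = 30
): Promise<string> {
  // Placeholder naming the model, like the initial ctx.reply added in patch 1.
  const msgId = await messenger.send(`_${model.toUpperCase()}_`);
  let completion = "";
  let chunkCount = 0;

  for await (const chunk of chunks) {
    completion += chunk;
    chunkCount++;
    if (chunkCount >= editEvery) {
      chunkCount = 0;
      // Edits can fail (rate limits, unchanged text); the patch swallows these errors too.
      await messenger.edit(msgId, completion + "..").catch(() => undefined);
    }
  }

  // Patch 3 behavior: drop the working message and send the final text as a new one.
  await messenger.delete(msgId);
  await messenger.send(completion);
  return completion;
}

// Usage with an in-memory messenger and a fake token stream.
async function demo(): Promise<void> {
  let nextId = 1;
  const store = new Map<number, string>();
  const messenger: Messenger = {
    async send(text) { const id = nextId++; store.set(id, text); return id; },
    async edit(id, text) { store.set(id, text); },
    async delete(id) { store.delete(id); },
  };
  async function* fakeChunks(): AsyncGenerator<string> {
    for (const word of "a reply assembled chunk by chunk".split(" ")) yield word + " ";
  }
  const text = await streamWithThrottledEdits(fakeChunks(), messenger, "gpt-3.5-turbo", 3);
  console.log(text);
}

demo().catch(console.error);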