🚧 Work in progress: new webapp samples with Streamlit
k33g committed Feb 13, 2025
1 parent 641a944 commit 57e44a0
Showing 12 changed files with 368 additions and 3 deletions.
6 changes: 3 additions & 3 deletions .vscode/settings.json
@@ -1,8 +1,8 @@
 {
     "workbench.iconTheme": "material-icon-theme",
-    "workbench.colorTheme": "Ayu Green Mirage Bordered",
-    "editor.fontSize": 14,
-    "terminal.integrated.fontSize": 14,
+    "workbench.colorTheme": "Idea intellij light theme",
+    "editor.fontSize": 13,
+    "terminal.integrated.fontSize": 13,
     "editor.insertSpaces": true,
     "editor.tabSize": 4,
     "editor.detectIndentation": true,
Empty file.
15 changes: 15 additions & 0 deletions examples/69-web-chat-bot/backend/Dockerfile
@@ -0,0 +1,15 @@
FROM golang:1.23.1-alpine AS builder
WORKDIR /app
COPY main.go .
COPY go.mod .

RUN <<EOF
go mod tidy
go build -o web-chat-bot
EOF

FROM scratch
WORKDIR /app
COPY --from=builder /app/web-chat-bot .

CMD ["./web-chat-bot"]
6 changes: 6 additions & 0 deletions examples/69-web-chat-bot/backend/go.mod
@@ -0,0 +1,6 @@
module 69-web-chat-bot

go 1.23.1

require github.com/parakeet-nest/parakeet v0.2.4

Empty file.
154 changes: 154 additions & 0 deletions examples/69-web-chat-bot/backend/main.go
@@ -0,0 +1,154 @@
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"

	"github.com/google/uuid"
	"github.com/parakeet-nest/parakeet/completion"
	"github.com/parakeet-nest/parakeet/enums/option"
	"github.com/parakeet-nest/parakeet/history"
	"github.com/parakeet-nest/parakeet/llm"
)

/*
GetBytesBody returns the body of an HTTP request as a []byte.
- It takes a pointer to an http.Request as a parameter.
- It returns a []byte (empty if the body cannot be read).
*/
func GetBytesBody(request *http.Request) []byte {
	body, err := io.ReadAll(request.Body)
	if err != nil {
		return []byte{}
	}
	return body
}

func main() {

	ollamaUrl := os.Getenv("OLLAMA_BASE_URL")
	if ollamaUrl == "" {
		ollamaUrl = "http://localhost:11434"
	}

	model := os.Getenv("LLM_CHAT")
	if model == "" {
		model = "deepseek-r1:1.5b"
	}

	var httpPort = os.Getenv("HTTP_PORT")
	if httpPort == "" {
		httpPort = "5050"
	}

	fmt.Println("🌍", ollamaUrl, "📕", model)

	options := llm.SetOptions(map[string]interface{}{
		option.Temperature:   0.5,
		option.RepeatLastN:   2,
		option.RepeatPenalty: 2.2,
		option.TopK:          10,
		option.TopP:          0.5,
	})

	systemInstructions := `You are a useful AI agent, your name is Bob`

	conversation := history.MemoryMessages{
		Messages: make(map[string]llm.MessageRecord),
	}

	mux := http.NewServeMux()
	shouldIStopTheCompletion := false

	mux.HandleFunc("POST /chat", func(response http.ResponseWriter, request *http.Request) {
		// add a flusher
		flusher, ok := response.(http.Flusher)
		if !ok {
			response.Write([]byte("😡 Error: expected http.ResponseWriter to be an http.Flusher"))
			return
		}
		body := GetBytesBody(request)
		// unmarshal the json data
		var data map[string]string

		err := json.Unmarshal(body, &data)
		if err != nil {
			response.Write([]byte("😡 Error: " + err.Error()))
			return
		}

		userMessage := data["message"]
		previousMessages, _ := conversation.GetAllMessages()

		// (Re)Create the conversation
		conversationMessages := []llm.Message{}
		// instruction
		conversationMessages = append(conversationMessages, llm.Message{Role: "system", Content: systemInstructions})
		// history
		conversationMessages = append(conversationMessages, previousMessages...)
		// last question
		conversationMessages = append(conversationMessages, llm.Message{Role: "user", Content: userMessage})

		fmt.Println("🅰:", conversationMessages)

		query := llm.Query{
			Model:    model,
			Messages: conversationMessages,
			Options:  options,
		}
		/*
			query := llm.Query{
				Model: model,
				Messages: []llm.Message{
					{Role: "system", Content: systemInstructions},
					{Role: "user", Content: userMessage},
				},
				Options: options,
			}
		*/

		answer, err := completion.ChatStream(ollamaUrl, query,
			func(answer llm.Answer) error {
				log.Println("📝:", answer.Message.Content)
				response.Write([]byte(answer.Message.Content))

				flusher.Flush()
				if !shouldIStopTheCompletion {
					return nil
				} else {
					return errors.New("🚫 Cancelling request")
				}
			})

		if err != nil {
			shouldIStopTheCompletion = false
			response.Write([]byte("bye: " + err.Error()))
		}

		conversation.SaveMessage(uuid.New().String(), llm.Message{
			Role:    "user",
			Content: userMessage,
		})
		conversation.SaveMessage(uuid.New().String(), llm.Message{
			Role:    "system",
			Content: answer.Message.Content,
		})

	})

	// Cancel/Stop the generation of the completion
	mux.HandleFunc("DELETE /cancel", func(response http.ResponseWriter, request *http.Request) {
		shouldIStopTheCompletion = true
		response.Write([]byte("🚫 Cancelling request..."))
	})

	log.Println("🌍 http server is listening on: " + httpPort)
	log.Fatal(http.ListenAndServe(":"+httpPort, mux))
}
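
The /chat handler above writes each model chunk to the HTTP response and flushes it immediately, so any client that reads the body incrementally sees the answer stream in as it is generated. As a minimal illustration (not part of this commit), the following Go sketch posts a question to the backend and prints the streamed reply; it assumes the backend is running locally on its default port 5050, and the question text is only an example:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Ask the backend a question (assumes it listens on localhost:5050, its default HTTP_PORT).
	payload := bytes.NewBufferString(`{"message": "Hello Bob, who are you?"}`)
	resp, err := http.Post("http://localhost:5050/chat", "application/json", payload)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Print the streamed completion chunk by chunk, as the backend flushes it.
	buf := make([]byte, 1024)
	for {
		n, readErr := resp.Body.Read(buf)
		if n > 0 {
			fmt.Print(string(buf[:n]))
		}
		if readErr == io.EOF {
			break
		}
		if readErr != nil {
			panic(readErr)
		}
	}
	fmt.Println()
}

Sending DELETE /cancel from another terminal (or another goroutine) flips shouldIStopTheCompletion, and the stream stops at the next chunk.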
43 changes: 43 additions & 0 deletions examples/69-web-chat-bot/compose.yml
@@ -0,0 +1,43 @@
services:
  # docker compose up --build

  download-local-llm:
    image: curlimages/curl:8.6.0
    entrypoint: ["curl", "http://host.docker.internal:11434/api/pull", "-d", '{"name": "qwen2.5:3b"}']

  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile
    environment:
      - OLLAMA_BASE_URL=http://host.docker.internal:11434
      - LLM_CHAT=qwen2.5:3b
    depends_on:
      download-local-llm:
        condition: service_completed_successfully
    develop:
      watch:
        - action: rebuild
          path: ./backend/main.go

  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    ports:
      - 9090:8501
    environment:
      - BACKEND_SERVICE_URL=http://backend:5050
      - PAGE_TITLE=🙂🤓🥸 We are Bob!
      - PAGE_HEADER=We are legion 🤖🤖🤖
      - PAGE_ICON=🤖
    depends_on:
      - backend
    develop:
      watch:
        - action: rebuild
          path: ./frontend/app.py


2 changes: 2 additions & 0 deletions examples/69-web-chat-bot/frontend/.streamlit/config.toml
@@ -0,0 +1,2 @@
[browser]
gatherUsageStats = false
13 changes: 13 additions & 0 deletions examples/69-web-chat-bot/frontend/Dockerfile
@@ -0,0 +1,13 @@
FROM python:3.9-slim

WORKDIR /app

COPY requirements.txt .

RUN pip install -r requirements.txt

COPY . .

EXPOSE 8501

CMD ["streamlit", "run", "app.py"]
128 changes: 128 additions & 0 deletions examples/69-web-chat-bot/frontend/app.py
@@ -0,0 +1,128 @@
import streamlit as st
import requests
import os
from datetime import datetime

PAGE_TITLE = os.environ.get('PAGE_TITLE', 'Web Chat Bot demo')
PAGE_HEADER = os.environ.get('PAGE_HEADER', 'Made with Streamlit and Parakeet')

PAGE_ICON = os.environ.get('PAGE_ICON', '🚀')

# Configuration of the Streamlit page
st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON)

# Hide the Deploy button
st.markdown("""
    <style>
        .stDeployButton {
            visibility: hidden;
        }
    </style>
""", unsafe_allow_html=True)

# Initialisation of the session state
if "messages" not in st.session_state:
    st.session_state.messages = []

# Handle the reset of the input key
if "input_key" not in st.session_state:
    st.session_state.input_key = 0

# Backend URL (the Go backend service)
#BACKEND_SERVICE_URL = "http://backend:5050"

BACKEND_SERVICE_URL = os.environ.get('BACKEND_SERVICE_URL', 'http://backend:5050')

def stream_response(message):
    """Stream the message response from the backend"""
    try:
        with requests.post(
            BACKEND_SERVICE_URL+"/chat",
            json={"message": message},
            headers={"Content-Type": "application/json"},
            stream=True
        ) as response:
            # Create a placeholder for the streaming response
            response_placeholder = st.empty()
            full_response = ""

            # Stream the response chunks
            for chunk in response.iter_content(chunk_size=1024, decode_unicode=True):
                if chunk:
                    chunk_text = chunk.decode('utf-8') if isinstance(chunk, bytes) else chunk
                    full_response += chunk_text
                    # Update the placeholder with the accumulated response
                    response_placeholder.markdown(full_response)

            return full_response
    except requests.exceptions.RequestException as e:
        error_msg = f"😡 Connection error: {str(e)}"
        st.error(error_msg)
        return error_msg

def increment_input_key():
    """Increment the input key to reset the input field"""
    st.session_state.input_key += 1

# Page title
st.title(PAGE_TITLE)
st.header(PAGE_HEADER)

# Form to send a message
with st.form(key=f"message_form_{st.session_state.input_key}"):
    #message = st.text_input("📝 Your message:", key=f"input_{st.session_state.input_key}")
    message = st.text_area("📝 Your message:", key=f"input_{st.session_state.input_key}", height=150)
    #submit_button = st.form_submit_button(label="Send...")
    #cancel_button = st.form_submit_button(label="Cancel", type="secondary")
    col1, col2, col3, col4, col5, col6 = st.columns(6)
    with col1:
        submit_button = st.form_submit_button(label="Send...")
    with col6:
        cancel_button = st.form_submit_button(label="Cancel", type="secondary")

# Handle the message submission
if submit_button and message and len(message.strip()) > 0:
    # Add the message to the history
    st.session_state.messages.append({
        "role": "user",
        "content": message,
        "time": datetime.now()
    })

    # Stream the response from the backend
    response = stream_response(message)

    # Add the response to the history
    st.session_state.messages.append({
        "role": "assistant",
        "content": response,
        "time": datetime.now()
    })

    # Reset the input field
    increment_input_key()
    st.rerun()

# Handle the cancellation
if cancel_button:
    try:
        response = requests.delete(f"{BACKEND_SERVICE_URL}/cancel")
        if response.status_code == 200:
            st.success("Request cancelled successfully")
        else:
            st.error("Failed to cancel request")
    except requests.exceptions.RequestException as e:
        st.error(f"Error cancelling request: {str(e)}")



# Display the messages history
st.write("### Messages history")
for msg in reversed(st.session_state.messages):
    with st.container():
        if msg["role"] == "user":
            st.info(f"🤓 You ({msg['time'].strftime('%H:%M')})")
            st.write(msg["content"])
        else:
            st.success(f"🤖 Assistant ({msg['time'].strftime('%H:%M')})")
            st.write(msg["content"])

2 changes: 2 additions & 0 deletions examples/69-web-chat-bot/frontend/requirements.txt
@@ -0,0 +1,2 @@
streamlit==1.31.1
requests==2.31.0
2 changes: 2 additions & 0 deletions go.work
@@ -123,6 +123,8 @@ use (
	./examples/67-mcp/mcp-server
	./examples/68-deepseek-r1

	./examples/69-web-chat-bot/backend

	examples/90-characters
	examples/91-characters

