From 4933525f09b343e7afcd8258e28f47d10f4efc5c Mon Sep 17 00:00:00 2001 From: "@k33g" Date: Sat, 8 Feb 2025 17:39:39 +0100 Subject: [PATCH] =?UTF-8?q?=F0=9F=92=BE=20Saved.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .vscode/settings.json | 2 +- LAST_RELEASE.md | 2 + README.md | 282 ++++++++++++++++++ examples/08-embeddings-memory/main.go | 56 ++-- examples/15-mistral-function-calling/go.mod | 4 +- examples/15-mistral-function-calling/go.sum | 2 + examples/17-fake-function-calling/go.mod | 4 +- examples/17-fake-function-calling/go.sum | 2 + examples/52-constraints/go.mod | 2 + examples/52-constraints/go.sum | 2 + examples/67-mcp/go.mod | 4 +- examples/67-mcp/go.sum | 2 + examples/67-mcp/mcp-server/go.mod | 2 +- examples/67-mcp/mcp-server/go.sum | 4 +- examples/68-deepseek-r1/.env | 5 + examples/68-deepseek-r1/README.md | 11 + examples/68-deepseek-r1/go.mod | 10 + examples/68-deepseek-r1/go.sum | 2 + examples/68-deepseek-r1/iris-database.xml | 135 +++++++++ examples/68-deepseek-r1/iris-instructions.md | 67 +++++ examples/68-deepseek-r1/main.go | 129 ++++++++ .../68-deepseek-r1/system-instructions.md | 1 + go.work | 1 + 23 files changed, 697 insertions(+), 34 deletions(-) create mode 100644 examples/15-mistral-function-calling/go.sum create mode 100644 examples/17-fake-function-calling/go.sum create mode 100644 examples/52-constraints/go.sum create mode 100644 examples/68-deepseek-r1/.env create mode 100644 examples/68-deepseek-r1/README.md create mode 100644 examples/68-deepseek-r1/go.mod create mode 100644 examples/68-deepseek-r1/go.sum create mode 100644 examples/68-deepseek-r1/iris-database.xml create mode 100644 examples/68-deepseek-r1/iris-instructions.md create mode 100644 examples/68-deepseek-r1/main.go create mode 100644 examples/68-deepseek-r1/system-instructions.md diff --git a/.vscode/settings.json b/.vscode/settings.json index 76d0dbc..bc5d3ae 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json 
@@ -1,6 +1,6 @@ { "workbench.iconTheme": "material-icon-theme", - "workbench.colorTheme": "Alt Catppuccin Frappé", + "workbench.colorTheme": "Ayu Green Mirage Bordered", "editor.fontSize": 14, "terminal.integrated.fontSize": 14, "editor.insertSpaces": true, diff --git a/LAST_RELEASE.md b/LAST_RELEASE.md index 1f2d8a8..73f64db 100644 --- a/LAST_RELEASE.md +++ b/LAST_RELEASE.md @@ -13,6 +13,8 @@ Improving the RAG example with Elasticsearch: `40-rag-with-elastic-markdown` ( - Structured output: `66-structured-outputs` - Experiments with Hypothetical Document Embeddings (HyDE): `65-hyde` (🚧 this is a work in progress) +- MCP Client: `67-mcp` +- How to use DeepSeek R1 (`1.5b`): `68-deepseek-r1` ### Error management diff --git a/README.md b/README.md index fc37b94..704512e 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,288 @@ Parakeet is the simplest Go library to create **GenAI apps** with **[Ollama](htt go get github.com/parakeet-nest/parakeet ``` +## Some examples +## Chat with streaming +```golang +ollamaUrl := "http://localhost:11434" +model := "deepseek-coder" +systemContent := `You are an expert in computer programming. +Please make friendly answer for the noobs. 
+Add source code examples if you can.` + +userContent := `Create a "hello world" program in Golang.` + +options := llm.SetOptions(map[string]interface{}{ + option.Temperature: 0.5, + option.RepeatLastN: 2, + option.RepeatPenalty: 2.2, +}) + +query := llm.Query{ + Model: model, + Messages: []llm.Message{ + {Role: "system", Content: systemContent}, + {Role: "user", Content: userContent}, + }, + Options: options, +} + +_, err := completion.ChatStream(ollamaUrl, query, + func(answer llm.Answer) error { + fmt.Print(answer.Message.Content) + return nil + }) +``` + +## Tools (function calling) + +```golang + +ollamaUrl := "http://localhost:11434" +model := "allenporter/xlam:1b" + +toolsList := []llm.Tool{ + { + Type: "function", + Function: llm.Function{ + Name: "multiplyNumbers", + Description: "Make a multiplication of the two given numbers", + Parameters: llm.Parameters{ + Type: "object", + Properties: map[string]llm.Property{ + "a": { + Type: "number", + Description: "first operand", + }, + "b": { + Type: "number", + Description: "second operand", + }, + }, + Required: []string{"a", "b"}, + }, + }, + }, + { + Type: "function", + Function: llm.Function{ + Name: "addNumbers", + Description: "Make an addition of the two given numbers", + Parameters: llm.Parameters{ + Type: "object", + Properties: map[string]llm.Property{ + "a": { + Type: "number", + Description: "first operand", + }, + "b": { + Type: "number", + Description: "second operand", + }, + }, + Required: []string{"a", "b"}, + }, + }, + }, +} + +messages := []llm.Message{ + {Role: "user", Content: `add 2 and 40`}, + {Role: "user", Content: `multiply 2 and 21`}, +} + +options := llm.SetOptions(map[string]interface{}{ + option.Temperature: 0.0, + option.RepeatLastN: 2, + option.RepeatPenalty: 2.0, +}) + +query := llm.Query{ + Model: model, + Messages: messages, + Tools: toolsList, + Options: options, + Format: "json", +} + +answer, err := completion.Chat(ollamaUrl, query) +if err != nil { + log.Fatal("😡:", err) 
+} + +for idx, toolCall := range answer.Message.ToolCalls { + result, err := toolCall.Function.ToJSONString() + if err != nil { + log.Fatal("😡:", err) + } + // display the tool to call + fmt.Println("ToolCall", idx, ":", result) + + /* Results: + ToolCall 0 : {"name":"addNumbers","arguments":{"a":2,"b":40}} + ToolCall 1 : {"name":"multiplyNumbers","arguments":{"a":2,"b":21}} + */ +} +``` + +## Structured Output + +```golang +ollamaUrl := "http://localhost:11434" +model := "qwen2.5:0.5b" + +options := llm.SetOptions(map[string]interface{}{ + option.Temperature: 1.5, +}) + +// define schema for a structured output +schema := map[string]any{ + "type": "object", + "properties": map[string]any{ + "name": map[string]any{ + "type": "string", + }, + "capital": map[string]any{ + "type": "string", + }, + "languages": map[string]any{ + "type": "array", + "items": map[string]any{ + "type": "string", + }, + }, + }, + "required": []string{"name", "capital", "languages"}, +} + +query := llm.Query{ + Model: model, + Messages: []llm.Message{ + {Role: "user", Content: "Tell me about Canada."}, + }, + Options: options, + Format: schema, + Raw: false, +} + +answer, err := completion.Chat(ollamaUrl, query) + +fmt.Println(answer.Message.Content) + +/* Results: +{ + "capital": "Ottawa", + "languages": ["English", "French"], + "name": "Canada of the West: Land of Ice and Rainbows" +} +*/ +``` + +## Quick RAG + +```golang +docs := []string{ + `Michael Burnham is the main character on the Star Trek series, Discovery. + She's a human raised on the logical planet Vulcan by Spock's father. + Burnham is intelligent and struggles to balance her human emotions with Vulcan logic. + She's become a Starfleet captain known for her determination and problem-solving skills. + Originally played by actress Sonequa Martin-Green`, + + `James T. Kirk, also known as Captain Kirk, is a fictional character from the Star Trek franchise. 
+ He's the iconic captain of the starship USS Enterprise, + boldly exploring the galaxy with his crew. + Originally played by actor William Shatner, + Kirk has appeared in TV series, movies, and other media.`, + + `Jean-Luc Picard is a fictional character in the Star Trek franchise. + He's most famous for being the captain of the USS Enterprise-D, + a starship exploring the galaxy in the 24th century. + Picard is known for his diplomacy, intelligence, and strong moral compass. + He's been portrayed by actor Patrick Stewart.`, + + `Lieutenant Philippe Charrière, known as the **Silent Sentinel** of the USS Discovery, + is the enigmatic programming genius whose codes safeguard the ship's secrets and operations. + His swift problem-solving skills are as legendary as the mysterious aura that surrounds him. + Charrière, a man of few words, speaks the language of machines with unrivaled fluency, + making him the crew's unsung guardian in the cosmos. His best friend is Spiderman from the Marvel Cinematic Universe.`, +} + +ollamaUrl := "http://localhost:11434" +embeddingsModel := "mxbai-embed-large:latest" // This model is for the embeddings of the documents +smallChatModel := "qwen2.5:1.5b" // This model is for the chat completion + +store := embeddings.MemoryVectorStore{ + Records: make(map[string]llm.VectorRecord), +} + +// Create embeddings from documents and save them in the store +for idx, doc := range docs { + fmt.Println("Creating embedding from document ", idx) + embedding, err := embeddings.CreateEmbedding( + ollamaUrl, + llm.Query4Embedding{ + Model: embeddingsModel, + Prompt: doc, + }, + strconv.Itoa(idx), + ) + if err != nil { + fmt.Println("😡:", err) + } else { + store.Save(embedding) + } +} + +// Question for the Chat system +userContent := `Who is Philippe Charrière and what spaceship does he work on?` + +systemContent := `You are an AI assistant. Your name is Seven. +Some people are calling you Seven of Nine. +You are an expert in Star Trek. 
+All questions are about Star Trek. +Using the provided context, answer the user's question +to the best of your ability using only the resources provided.` + +// Create an embedding from the question +embeddingFromQuestion, err := embeddings.CreateEmbedding( + ollamaUrl, + llm.Query4Embedding{ + Model: embeddingsModel, + Prompt: userContent, + }, + "question", +) +if err != nil { + log.Fatalln("😡:", err) +} + +//🔎 searching for similarity... +similarity, _ := store.SearchMaxSimilarity(embeddingFromQuestion) + +documentsContent := `` + similarity.Prompt + `` + +query := llm.Query{ + Model: smallChatModel, + Messages: []llm.Message{ + {Role: "system", Content: systemContent}, + {Role: "system", Content: documentsContent}, + {Role: "user", Content: userContent}, + }, + Options: llm.SetOptions(map[string]interface{}{ + option.Temperature: 0.4, + option.RepeatLastN: 2, + }), +} + +fmt.Println("🤖 answer:") + +// Answer the question +_, err = completion.ChatStream(ollamaUrl, query, + func(answer llm.Answer) error { + fmt.Print(answer.Message.Content) + return nil + }) +``` \ No newline at end of file diff --git a/examples/08-embeddings-memory/main.go b/examples/08-embeddings-memory/main.go index f78364f..9f7cbb8 100644 --- a/examples/08-embeddings-memory/main.go +++ b/examples/08-embeddings-memory/main.go @@ -12,38 +12,40 @@ import ( ) -var docs = []string{ - `Michael Burnham is the main character on the Star Trek series, Discovery. - She's a human raised on the logical planet Vulcan by Spock's father. - Burnham is intelligent and struggles to balance her human emotions with Vulcan logic. - She's become a Starfleet captain known for her determination and problem-solving skills. - Originally played by actress Sonequa Martin-Green`, - - `James T. Kirk, also known as Captain Kirk, is a fictional character from the Star Trek franchise. - He's the iconic captain of the starship USS Enterprise, - boldly exploring the galaxy with his crew. 
- Originally played by actor William Shatner, - Kirk has appeared in TV series, movies, and other media.`, - - `Jean-Luc Picard is a fictional character in the Star Trek franchise. - He's most famous for being the captain of the USS Enterprise-D, - a starship exploring the galaxy in the 24th century. - Picard is known for his diplomacy, intelligence, and strong moral compass. - He's been portrayed by actor Patrick Stewart.`, - - `Lieutenant Philippe Charrière, known as the **Silent Sentinel** of the USS Discovery, - is the enigmatic programming genius whose codes safeguard the ship's secrets and operations. - His swift problem-solving skills are as legendary as the mysterious aura that surrounds him. - Charrière, a man of few words, speaks the language of machines with unrivaled fluency, - making him the crew's unsung guardian in the cosmos. His best friend is Spiderman from the Marvel Cinematic Universe.`, -} func main() { + + docs := []string{ + `Michael Burnham is the main character on the Star Trek series, Discovery. + She's a human raised on the logical planet Vulcan by Spock's father. + Burnham is intelligent and struggles to balance her human emotions with Vulcan logic. + She's become a Starfleet captain known for her determination and problem-solving skills. + Originally played by actress Sonequa Martin-Green`, + + `James T. Kirk, also known as Captain Kirk, is a fictional character from the Star Trek franchise. + He's the iconic captain of the starship USS Enterprise, + boldly exploring the galaxy with his crew. + Originally played by actor William Shatner, + Kirk has appeared in TV series, movies, and other media.`, + + `Jean-Luc Picard is a fictional character in the Star Trek franchise. + He's most famous for being the captain of the USS Enterprise-D, + a starship exploring the galaxy in the 24th century. + Picard is known for his diplomacy, intelligence, and strong moral compass. 
+ He's been portrayed by actor Patrick Stewart.`, + + `Lieutenant Philippe Charrière, known as the **Silent Sentinel** of the USS Discovery, + is the enigmatic programming genius whose codes safeguard the ship's secrets and operations. + His swift problem-solving skills are as legendary as the mysterious aura that surrounds him. + Charrière, a man of few words, speaks the language of machines with unrivaled fluency, + making him the crew's unsung guardian in the cosmos. His best friend is Spiderman from the Marvel Cinematic Universe.`, + } + ollamaUrl := "http://localhost:11434" // if working from a container //ollamaUrl := "http://host.docker.internal:11434" - var embeddingsModel = "all-minilm" // This model is for the embeddings of the documents - var smallChatModel = "qwen:0.5b" // This model is for the chat completion + var embeddingsModel = "mxbai-embed-large:latest" // This model is for the embeddings of the documents + var smallChatModel = "qwen2.5:1.5b" // This model is for the chat completion store := embeddings.MemoryVectorStore{ Records: make(map[string]llm.VectorRecord), diff --git a/examples/15-mistral-function-calling/go.mod b/examples/15-mistral-function-calling/go.mod index 4466961..ce744eb 100644 --- a/examples/15-mistral-function-calling/go.mod +++ b/examples/15-mistral-function-calling/go.mod @@ -4,4 +4,6 @@ go 1.23.1 require github.com/parakeet-nest/parakeet v0.2.3 -replace github.com/parakeet-nest/parakeet => ../.. \ No newline at end of file +require github.com/mark3labs/mcp-go v0.8.3 // indirect + +replace github.com/parakeet-nest/parakeet => ../.. 
diff --git a/examples/15-mistral-function-calling/go.sum b/examples/15-mistral-function-calling/go.sum new file mode 100644 index 0000000..71582c1 --- /dev/null +++ b/examples/15-mistral-function-calling/go.sum @@ -0,0 +1,2 @@ +github.com/mark3labs/mcp-go v0.8.3 h1:IzlyN8BaP4YwUMUDqxOGJhGdZXEDQiAPX43dNPgnzrg= +github.com/mark3labs/mcp-go v0.8.3/go.mod h1:cjMlBU0cv/cj9kjlgmRhoJ5JREdS7YX83xeIG9Ko/jE= diff --git a/examples/17-fake-function-calling/go.mod b/examples/17-fake-function-calling/go.mod index 0a38062..064fa2e 100644 --- a/examples/17-fake-function-calling/go.mod +++ b/examples/17-fake-function-calling/go.mod @@ -4,4 +4,6 @@ go 1.23.1 require github.com/parakeet-nest/parakeet v0.2.3 -replace github.com/parakeet-nest/parakeet => ../.. \ No newline at end of file +require github.com/mark3labs/mcp-go v0.8.3 // indirect + +replace github.com/parakeet-nest/parakeet => ../.. diff --git a/examples/17-fake-function-calling/go.sum b/examples/17-fake-function-calling/go.sum new file mode 100644 index 0000000..71582c1 --- /dev/null +++ b/examples/17-fake-function-calling/go.sum @@ -0,0 +1,2 @@ +github.com/mark3labs/mcp-go v0.8.3 h1:IzlyN8BaP4YwUMUDqxOGJhGdZXEDQiAPX43dNPgnzrg= +github.com/mark3labs/mcp-go v0.8.3/go.mod h1:cjMlBU0cv/cj9kjlgmRhoJ5JREdS7YX83xeIG9Ko/jE= diff --git a/examples/52-constraints/go.mod b/examples/52-constraints/go.mod index 8333f3e..f08c297 100644 --- a/examples/52-constraints/go.mod +++ b/examples/52-constraints/go.mod @@ -4,4 +4,6 @@ go 1.23.1 require github.com/parakeet-nest/parakeet v0.2.3 +require github.com/mark3labs/mcp-go v0.8.3 // indirect + replace github.com/parakeet-nest/parakeet => ../.. 
diff --git a/examples/52-constraints/go.sum b/examples/52-constraints/go.sum new file mode 100644 index 0000000..71582c1 --- /dev/null +++ b/examples/52-constraints/go.sum @@ -0,0 +1,2 @@ +github.com/mark3labs/mcp-go v0.8.3 h1:IzlyN8BaP4YwUMUDqxOGJhGdZXEDQiAPX43dNPgnzrg= +github.com/mark3labs/mcp-go v0.8.3/go.mod h1:cjMlBU0cv/cj9kjlgmRhoJ5JREdS7YX83xeIG9Ko/jE= diff --git a/examples/67-mcp/go.mod b/examples/67-mcp/go.mod index 91fc7bf..8c7db8f 100644 --- a/examples/67-mcp/go.mod +++ b/examples/67-mcp/go.mod @@ -3,8 +3,10 @@ module 67-mcp go 1.23.1 require ( - github.com/mark3labs/mcp-go v0.8.3 + github.com/joho/godotenv v1.5.1 github.com/parakeet-nest/parakeet v0.0.0-00010101000000-000000000000 ) +require github.com/mark3labs/mcp-go v0.8.3 // indirect + replace github.com/parakeet-nest/parakeet => ../.. diff --git a/examples/67-mcp/go.sum b/examples/67-mcp/go.sum index a8aa877..4af11ef 100644 --- a/examples/67-mcp/go.sum +++ b/examples/67-mcp/go.sum @@ -1,4 +1,6 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/mark3labs/mcp-go v0.8.3 h1:IzlyN8BaP4YwUMUDqxOGJhGdZXEDQiAPX43dNPgnzrg= github.com/mark3labs/mcp-go v0.8.3/go.mod h1:cjMlBU0cv/cj9kjlgmRhoJ5JREdS7YX83xeIG9Ko/jE= diff --git a/examples/67-mcp/mcp-server/go.mod b/examples/67-mcp/mcp-server/go.mod index ada02a1..8656ca1 100644 --- a/examples/67-mcp/mcp-server/go.mod +++ b/examples/67-mcp/mcp-server/go.mod @@ -2,6 +2,6 @@ module mcp-curl go 1.23.1 -require github.com/mark3labs/mcp-go v0.8.2 +require github.com/mark3labs/mcp-go v0.8.3 require github.com/google/uuid v1.6.0 // indirect diff --git a/examples/67-mcp/mcp-server/go.sum b/examples/67-mcp/mcp-server/go.sum index 01abab7..2538938 100644 --- 
a/examples/67-mcp/mcp-server/go.sum +++ b/examples/67-mcp/mcp-server/go.sum @@ -2,8 +2,8 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/mark3labs/mcp-go v0.8.2 h1:OtqqXlRqjXs6zuMhf1uiuQ2iqBrhMGgLpDeVDUWMKFc= -github.com/mark3labs/mcp-go v0.8.2/go.mod h1:cjMlBU0cv/cj9kjlgmRhoJ5JREdS7YX83xeIG9Ko/jE= +github.com/mark3labs/mcp-go v0.8.3 h1:IzlyN8BaP4YwUMUDqxOGJhGdZXEDQiAPX43dNPgnzrg= +github.com/mark3labs/mcp-go v0.8.3/go.mod h1:cjMlBU0cv/cj9kjlgmRhoJ5JREdS7YX83xeIG9Ko/jE= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= diff --git a/examples/68-deepseek-r1/.env b/examples/68-deepseek-r1/.env new file mode 100644 index 0000000..b68bfc6 --- /dev/null +++ b/examples/68-deepseek-r1/.env @@ -0,0 +1,5 @@ +# if working from a container +#OLLAMA_HOST=http://host.docker.internal:11434 +OLLAMA_HOST=http://localhost:11434 +LLM_CHAT=deepseek-r1:1.5b + diff --git a/examples/68-deepseek-r1/README.md b/examples/68-deepseek-r1/README.md new file mode 100644 index 0000000..08b05d5 --- /dev/null +++ b/examples/68-deepseek-r1/README.md @@ -0,0 +1,11 @@ +# Iris Classification (Expert System) + +- Improved results by adding classification rules +- Achieved 100% accuracy with proper guidance and complete data + +The key takeaway is that small models (1.5b parameters) can be effective when: +1. The task is well-defined +2. Proper guidance is provided +3. Relevant data is available +4. 
Appropriate techniques (like RAG) are used to manage larger datasets + diff --git a/examples/68-deepseek-r1/go.mod b/examples/68-deepseek-r1/go.mod new file mode 100644 index 0000000..699cc6d --- /dev/null +++ b/examples/68-deepseek-r1/go.mod @@ -0,0 +1,10 @@ +module 68-deepseek-r1 + +go 1.23.1 + +require ( + github.com/joho/godotenv v1.5.1 + github.com/parakeet-nest/parakeet v0.0.0-00010101000000-000000000000 +) + +replace github.com/parakeet-nest/parakeet => ../.. diff --git a/examples/68-deepseek-r1/go.sum b/examples/68-deepseek-r1/go.sum new file mode 100644 index 0000000..d61b19e --- /dev/null +++ b/examples/68-deepseek-r1/go.sum @@ -0,0 +1,2 @@ +github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0= +github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= diff --git a/examples/68-deepseek-r1/iris-database.xml b/examples/68-deepseek-r1/iris-database.xml new file mode 100644 index 0000000..899d9e2 --- /dev/null +++ b/examples/68-deepseek-r1/iris-database.xml @@ -0,0 +1,135 @@ + + + + + Species_No + Petal_width + Petal_length + Sepal_width + Sepal_length + Species_name + + + + + 1 + 0,2 + 1,4 + 3,5 + 5,1 + Setosa + + + 1 + 0,2 + 1,4 + 3 + 4,9 + Setosa + + + 1 + 0,2 + 1,3 + 3,2 + 4,7 + Setosa + + + 1 + 0,2 + 1,5 + 3,1 + 4,6 + Setosa + + + 1 + 0,2 + 1,4 + 3,6 + 5 + Setosa + + + 1 + 0,4 + 1,7 + 3,9 + 5,4 + Setosa + + + 1 + 0,3 + 1,4 + 3,4 + 4,6 + Setosa + + + 1 + 0,2 + 1,5 + 3,4 + 5 + Setosa + + + 1 + 0,2 + 1,4 + 2,9 + 4,4 + Setosa + + + 1 + 0,1 + 1,5 + 3,1 + 4,9 + Setosa + + + 2 + 1,4 + 4,7 + 3,2 + 7 + Versicolor + + + 2 + 1,5 + 4,5 + 3,2 + 6,4 + Versicolor + + + 3 + 2,5 + 6 + 3,3 + 6,3 + Verginica + + + 3 + 1,9 + 5,1 + 2,7 + 5,8 + Verginica + + + 3 + 2,1 + 5,9 + 3 + 7,1 + Verginica + + + \ No newline at end of file diff --git a/examples/68-deepseek-r1/iris-instructions.md b/examples/68-deepseek-r1/iris-instructions.md new file mode 100644 index 0000000..7af97dc --- /dev/null +++ 
b/examples/68-deepseek-r1/iris-instructions.md @@ -0,0 +1,67 @@ +# Instructions for Iris Species Classification + +As an LLM tasked with iris species classification, you must follow these steps to analyze the four key measurements and determine the species (Setosa, Versicolor, or Verginica). + +## Input Features +You will be given four numerical measurements: +1. Petal width (in cm) +2. Petal length (in cm) +3. Sepal width (in cm) +4. Sepal length (in cm) + +## Classification Process + +### Step 1: Primary Feature Analysis +First, examine the petal measurements as they are the most discriminative features: +- Setosa has distinctively small petals + - Petal length < 2 cm + - Petal width < 0.5 cm + +### Step 2: Secondary Feature Analysis +If the specimen is not clearly Setosa, analyze the combination of features: + +For Versicolor: +- Petal length typically between 3-5 cm +- Petal width between 1.0-1.8 cm +- Sepal length typically between 5-7 cm +- Sepal width typically between 2-3.5 cm + +For Verginica: +- Petal length typically > 4.5 cm +- Petal width typically > 1.4 cm +- Sepal length typically > 6 cm +- Sepal width typically between 2.5-3.8 cm + +### Step 3: Decision Making +1. If petal measurements match Setosa's distinctive small size → Classify as Setosa +2. If measurements fall in the intermediate range → Classify as Versicolor +3. If measurements show larger values, especially in petal length → Classify as Verginica + +### Step 4: Confidence Check +- Consider the clarity of the distinction: + - Are the measurements clearly in one category's range? + - Are there any overlapping characteristics? + - Express any uncertainty if measurements are in borderline ranges + +### Step 5: Explanation +Provide reasoning for your classification by: +1. Highlighting which measurements were most decisive +2. Explaining why certain features led to your conclusion +3. 
Noting any unusual or borderline measurements + +## Example Reasoning +"Given a specimen with: +- Petal width: 0.2 cm +- Petal length: 1.4 cm +- Sepal width: 3.5 cm +- Sepal length: 5.1 cm + +Classification process: +1. The very small petal measurements (width 0.2 cm, length 1.4 cm) are highly characteristic of Setosa +2. These petal dimensions are well below the ranges for Versicolor and Verginica +3. The sepal measurements support this classification, being in the typical range for Setosa +4. Confidence is high due to the distinctive petal size + +Therefore, this specimen is classified as Setosa with high confidence." + +After a certain point in your response, once you feel you have thoroughly addressed the main question or topic, please wrap up your reasoning process and conclude your answer, rather than going on indefinitely. \ No newline at end of file diff --git a/examples/68-deepseek-r1/main.go b/examples/68-deepseek-r1/main.go new file mode 100644 index 0000000..2381b4e --- /dev/null +++ b/examples/68-deepseek-r1/main.go @@ -0,0 +1,129 @@ +package main + +import ( + "fmt" + "log" + "os" + + "github.com/joho/godotenv" + "github.com/parakeet-nest/parakeet/completion" + "github.com/parakeet-nest/parakeet/enums/option" + "github.com/parakeet-nest/parakeet/llm" +) + +func main() { + err := godotenv.Load() + if err != nil { + log.Fatalln("😡:", err) + } + + ollamaUrl := os.Getenv("OLLAMA_HOST") + if ollamaUrl == "" { + ollamaUrl = "http://localhost:11434" + } + + model := os.Getenv("LLM_CHAT") + if model == "" { + model = "deepseek-r1:1.5b" + } + + fmt.Println("🌍", ollamaUrl, "📕", model) + + systemInstructions, err := os.ReadFile("system-instructions.md") + if err != nil { + log.Fatal("😡:", err) + } + + irisInstructions, err := os.ReadFile("iris-instructions.md") + if err != nil { + log.Fatal("😡:", err) + } + + irisDatabase, err := os.ReadFile("iris-database.xml") + if err != nil { + log.Fatal("😡:", err) + } + + /* + // Verginica + userContent := `Using the above 
information and the below information, + Given a specimen with: + - Petal width: 2,5 cm + - Petal length: 6 cm + - Sepal width: 3,3 cm + - Sepal length: 6,3 cm + What is the species of the iris?` + + // Versicolor + userContent := `Using the above information and the below information, + Given a specimen with: + - Petal width: 1,5 cm + - Petal length: 4,5 cm + - Sepal width: 3,2 cm + - Sepal length: 6,4 cm + What is the species of the iris?` + + // Setosa + userContent := `Using the above information and the below information, + Given a specimen with: + - Petal width: 0,2 cm + - Petal length: 1,4 cm + - Sepal width: 3,6 cm + - Sepal length: 5 cm + What is the species of the iris?` + */ + + // Verginica + userContent := `Using the above information and the below information, + Given a specimen with: + - Petal width: 1,9 cm + - Petal length: 5,1 cm + - Sepal width: 2,7 cm + - Sepal length: 5,8 cm + What is the species of the iris?` + + options := llm.SetOptions(map[string]interface{}{ + option.Temperature: 0.0, + option.RepeatLastN: 2, + option.RepeatPenalty: 2.2, + option.TopK: 10, + option.TopP: 0.5, + }) + + // Prompt construction + messages := []llm.Message{ + {Role: "system", Content: string(systemInstructions)}, + {Role: "system", Content: string(irisInstructions)}, + {Role: "system", Content: "# Iris Database\n" + string(irisDatabase)}, + {Role: "user", Content: userContent}, + } + + query := llm.Query{ + Model: model, + Messages: messages, + Options: options, + } + + _, err = completion.ChatStream(ollamaUrl, query, + func(answer llm.Answer) error { + fmt.Print(answer.Message.Content) + return nil + }) + + if err != nil { + switch e := err.(type) { + case *completion.ModelNotFoundError: + fmt.Printf("💥 Got Model Not Found error: %s\n", e.Message) + fmt.Printf("😡 Error code: %d\n", e.Code) + fmt.Printf("🧠 Expected Model: %s\n", e.Model) + + case *completion.NoSuchOllamaHostError: + fmt.Printf("🦙 Got No Such Ollama Host error: %s\n", e.Message) + fmt.Printf("🌍 
Expected Host: %s\n", e.Host) + + default: + log.Fatal("😡:", err) + } + + } +} diff --git a/examples/68-deepseek-r1/system-instructions.md b/examples/68-deepseek-r1/system-instructions.md new file mode 100644 index 0000000..cdd1c34 --- /dev/null +++ b/examples/68-deepseek-r1/system-instructions.md @@ -0,0 +1 @@ +You are an expert with iris species \ No newline at end of file diff --git a/go.work b/go.work index 553e4ff..d1e7499 100644 --- a/go.work +++ b/go.work @@ -121,6 +121,7 @@ use ( ./examples/66-structured-outputs ./examples/67-mcp ./examples/67-mcp/mcp-server + ./examples/68-deepseek-r1 examples/90-characters examples/91-characters