
Commit

Update llm CLI (#94)
ling0322 authored Aug 22, 2024
1 parent e6efedf commit e4ea6fe
Showing 6 changed files with 228 additions and 71 deletions.
README.md (12 changes: 6 additions & 6 deletions)
@@ -10,13 +10,13 @@ Welcome to libLLM, an open-source project designed for efficient inference of la

| Model | Download | llm Command |
|-------------|----------------|---------------|
- | Index-1.9B-Character (Role-playing) | [🤗[HF](https://huggingface.co/ling0322/bilibili-index-1.9b-libllm/blob/main/bilibili-index-1.9b-character-q4.llmpkg)] | llm chat -m index:character |
- | Index-1.9B-Chat | [🤗[HF](https://huggingface.co/ling0322/bilibili-index-1.9b-libllm/blob/main/bilibili-index-1.9b-chat-q4.llmpkg)] | llm chat -m index |
- | Qwen2-1.5B-Instruct | [🤗[HF](https://huggingface.co/ling0322/qwen-libllm/blob/main/qwen2-1.5b-instruct-q4.llmpkg)] | llm chat -m qwen:1.5b |
- | Qwen2-7B-Instruct | [🤗[HF](https://huggingface.co/ling0322/qwen-libllm/blob/main/qwen2-7b-instruct-q4.llmpkg)] | llm chat -m qwen:7b |
- | Whisper-large-v3 | [🤗[HF](https://huggingface.co/ling0322/whisper-libllm/resolve/main/whisper-large-v3-q4.llmpkg)] | llm transcribe -m whisper |
+ | Index-1.9B-Character (Role-playing) | [🤗[HF](https://huggingface.co/ling0322/bilibili-index-1.9b-libllm/blob/main/bilibili-index-1.9b-character-q4.llmpkg)] [[MS](https://modelscope.cn/models/ling0322/bilibili-index-libllm/file/view/master?fileName=bilibili-index-1.9b-character-q4.llmpkg&status=2)] | llm chat -m index:character |
+ | Index-1.9B-Chat | [🤗[HF](https://huggingface.co/ling0322/bilibili-index-1.9b-libllm/blob/main/bilibili-index-1.9b-chat-q4.llmpkg)] [[MS](https://modelscope.cn/models/ling0322/bilibili-index-libllm/file/view/master?fileName=bilibili-index-1.9b-chat-q4.llmpkg&status=2)] | llm chat -m index |
+ | Qwen2-1.5B-Instruct | [🤗[HF](https://huggingface.co/ling0322/qwen-libllm/blob/main/qwen2-1.5b-instruct-q4.llmpkg)] [[MS](https://modelscope.cn/models/ling0322/qwen2-libllm/file/view/master?fileName=qwen2-1.5b-instruct-q4.llmpkg&status=2)] | llm chat -m qwen:1.5b |
+ | Qwen2-7B-Instruct | [🤗[HF](https://huggingface.co/ling0322/qwen-libllm/blob/main/qwen2-7b-instruct-q4.llmpkg)] [[MS](https://modelscope.cn/models/ling0322/qwen2-libllm/file/view/master?fileName=qwen2-7b-instruct-q4.llmpkg&status=2)] | llm chat -m qwen:7b |
+ | Whisper-large-v3 | [🤗[HF](https://huggingface.co/ling0322/whisper-libllm/resolve/main/whisper-large-v3-q4.llmpkg)] [[MS](https://modelscope.cn/models/ling0322/whisper-libllm/file/view/master?fileName=whisper-large-v3-q4.llmpkg&status=2)] | llm transcribe -m whisper |

- `HF` = HuggingFace
+ `HF` = HuggingFace, `MS` = ModelScope

## Recent updates

go/bin/args.go (52 changes: 27 additions & 25 deletions)
@@ -21,6 +21,7 @@ package main

import (
"flag"
"fmt"
"log"
"log/slog"
"os"
@@ -33,14 +34,25 @@ import (
type binArgs struct {
fs *flag.FlagSet

- modelPath string
+ models modelList
device string
inputFile string
outputFile string
lang string
targetLang string
}

+ type modelList []string
+
+ func (l *modelList) String() string {
+ return fmt.Sprintf("%s", *l)
+ }
+
+ func (l *modelList) Set(value string) error {
+ *l = append(*l, value)
+ return nil
+ }
+
func newBinArgs(fs *flag.FlagSet) *binArgs {
return &binArgs{
fs: fs,
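
The hunk above replaces the single `modelPath string` field with a `models modelList` slice and gives `modelList` the `String`/`Set` methods of the standard `flag.Value` interface; the next hunk then switches `addModelFlag` from `fs.StringVar` to `fs.Var`, so `-m` can be passed more than once. A minimal, self-contained sketch of the same pattern (the `stringList` name and the usage text are illustrative, not taken from the repository):

```go
package main

import (
	"flag"
	"fmt"
)

// stringList implements flag.Value, so the same flag may be repeated;
// Set is called once per occurrence and appends to the slice.
type stringList []string

func (l *stringList) String() string {
	return fmt.Sprintf("%s", *l)
}

func (l *stringList) Set(value string) error {
	*l = append(*l, value)
	return nil
}

func main() {
	var models stringList
	flag.Var(&models, "m", "model name or .llmpkg file; may be repeated")
	flag.Parse()
	fmt.Println("models:", models)
}
```

With `flag.Var`, every occurrence of the flag triggers `Set`, so repeated `-m` flags accumulate in the slice instead of having to be packed into a single comma-separated value.
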
@@ -67,55 +79,43 @@ func (a *binArgs) getDevice() llm.Device {
}

func (a *binArgs) addModelFlag() {
- a.fs.StringVar(&a.modelPath, "m", "", "the libllm model, it could be model name or model file,"+
+ a.fs.Var(&a.models, "m", "the libllm model, it could be model name or model file,"+
" model files are with suffix \".llmpkg\". "+
"\nFor some specific tasks like transcription-translation, it could be a list of"+
" models, seperated by \",\".")
}

func (a *binArgs) getModel() string {
- if a.modelPath == "" {
+ if len(a.models) == 0 {
slog.Error("model name (-m) is empty.")
a.fs.Usage()
os.Exit(1)
}

- models := a.splitModels()
- if len(models) != 1 {
+ if len(a.models) != 1 {
slog.Error("only 1 model (-m) is expected, please check if there is any unexpected comma" +
" \",\" in model arg (-m).")
a.fs.Usage()
os.Exit(1)
}

- return models[0]
+ return a.models[0]
}

- func (a *binArgs) splitModels() []string {
- models := strings.Split(a.modelPath, ",")
- for _, model := range models {
+ func (a *binArgs) getModels() []string {
+ for _, model := range a.models {
if model == "" {
slog.Error("invalid model name (-m).")
a.fs.Usage()
os.Exit(1)
}
}

- return models
+ return a.models
}

func (a *binArgs) getNumModels() int {
- return len(a.splitModels())
- }
-
- func (a *binArgs) getModelList() []string {
- if a.modelPath == "" {
- slog.Error("model name (-m) is empty.")
- a.fs.Usage()
- os.Exit(1)
- }
-
- return a.splitModels()
+ return len(a.models)
}

func (a *binArgs) addInputFlag() {
@@ -146,6 +146,10 @@ func (a *binArgs) getOutput() string {
return a.outputFile
}

+ func (a *binArgs) tryGetOutput() string {
+ return a.outputFile
+ }
+
func (a *binArgs) addLangFlag() {
a.fs.StringVar(&a.lang, "lang", "", "the language of input.")
}
@@ -171,14 +175,12 @@ func (a *binArgs) addTargetLangFlag() {

func (a *binArgs) getTargetLang() skill.Lang {
if a.targetLang == "" {
slog.Error("target language (-targetlang) is empty.")
a.fs.Usage()
os.Exit(1)
return skill.UnknownLanguage
}

lang, err := skill.ParseLang(a.targetLang)
if err != nil {
- log.Fatal(err)
+ return skill.UnknownLanguage
}

return lang
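
The last hunk softens `getTargetLang`: instead of printing usage and exiting when `-targetlang` is empty or cannot be parsed, it now returns `skill.UnknownLanguage` and leaves the decision to the caller. A self-contained sketch of that sentinel-value pattern (the `Lang` type, `parseLang`, and the language codes are stand-ins, not the repository's `skill` package):

```go
package main

import (
	"errors"
	"fmt"
)

// Lang stands in for skill.Lang; UnknownLanguage is the sentinel that is
// returned instead of terminating the process.
type Lang string

const UnknownLanguage Lang = ""

// parseLang stands in for skill.ParseLang.
func parseLang(s string) (Lang, error) {
	switch s {
	case "en", "zh", "ja":
		return Lang(s), nil
	default:
		return UnknownLanguage, errors.New("unsupported language: " + s)
	}
}

// getTargetLang mirrors the new behaviour: no os.Exit, just a sentinel.
func getTargetLang(flagValue string) Lang {
	if flagValue == "" {
		return UnknownLanguage
	}
	lang, err := parseLang(flagValue)
	if err != nil {
		return UnknownLanguage
	}
	return lang
}

func main() {
	if lang := getTargetLang(""); lang == UnknownLanguage {
		// The caller, not the flag helper, now decides how to react.
		fmt.Println("no usable target language; skipping translation")
	}
}
```

The trade-off is that callers must now check for `UnknownLanguage` explicitly, but a helper that never calls `os.Exit` or `log.Fatal` is easier to reuse and to test.
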
go/bin/download.go (49 changes: 43 additions & 6 deletions)
@@ -12,6 +12,7 @@ import (
"path"
"path/filepath"
"runtime"
"time"

"github.com/ling0322/libllm/go/llm"
"github.com/schollz/progressbar/v3"
@@ -28,6 +29,14 @@ var modelUrls = map[string]string{
"qwen:1.5b:q4": "https://huggingface.co/ling0322/qwen-libllm/resolve/main/qwen2-1.5b-instruct-q4.llmpkg",
}

+ var modelMsUrls = map[string]string{
+ "index:chat:q4": "https://modelscope.cn/models/ling0322/bilibili-index-libllm/resolve/master/bilibili-index-1.9b-chat-q4.llmpkg",
+ "index:character:q4": "https://modelscope.cn/models/ling0322/bilibili-index-libllm/resolve/master/bilibili-index-1.9b-character-q4.llmpkg",
+ "whisper:large-v3:q4": "https://modelscope.cn/models/ling0322/whisper-libllm/resolve/master/whisper-large-v3-q4.llmpkg",
+ "qwen:7b:q4": "https://modelscope.cn/models/ling0322/qwen2-libllm/resolve/master/qwen2-7b-instruct-q4.llmpkg",
+ "qwen:1.5b:q4": "https://modelscope.cn/models/ling0322/qwen2-libllm/resolve/master/qwen2-1.5b-instruct-q4.llmpkg",
+ }
+
var modelFilenames = map[string]string{
"index:chat:q4": "bilibili-index-1.9b-chat-q4.llmpkg",
"index:character:q4": "bilibili-index-1.9b-character-q4.llmpkg",
@@ -112,8 +121,36 @@ func downloadFile(url, localPath, filename string) error {
return nil
}

+ func isInChina() bool {
+ client := http.Client{
+ Timeout: 5 * time.Second,
+ }
+
+ resp, err := client.Get("https://www.google.com")
+ if err != nil {
+ return true
+ }
+ defer resp.Body.Close()
+
+ return false
+ }
+
func downloadModel(name string) (modelPath string, err error) {
- url, ok := modelUrls[name]
+ slog.Info("download model", "name", name)
+
+ name, err = resolveModelName(name)
+ if err != nil {
+ return
+ }
+
+ var url string
+ var ok bool
+ if isInChina() {
+ url, ok = modelMsUrls[name]
+ } else {
+ url, ok = modelUrls[name]
+ }
+
if !ok {
log.Fatal("invalid model name")
}
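
The added `isInChina` probe treats a failed five-second HTTPS GET to google.com as a signal to prefer the ModelScope mirror, and `downloadModel` then looks the model up in `modelMsUrls` instead of `modelUrls`. A self-contained sketch of the same probe-and-select pattern, with the probe target and timeout as parameters (the helper names and placeholder URLs are illustrative, not part of the commit):

```go
package main

import (
	"fmt"
	"net/http"
	"time"
)

// reachable reports whether an HTTPS GET to url succeeds within timeout.
func reachable(url string, timeout time.Duration) bool {
	client := http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return false
	}
	resp.Body.Close()
	return true
}

// pickURL chooses the mirror map when the probe target is unreachable,
// which is the heuristic the commit uses to detect hosts in China.
func pickURL(name, probe string, primary, mirror map[string]string) (string, bool) {
	if !reachable(probe, 5*time.Second) {
		url, ok := mirror[name]
		return url, ok
	}
	url, ok := primary[name]
	return url, ok
}

func main() {
	primary := map[string]string{"qwen:1.5b:q4": "https://huggingface.co/..."}
	mirror := map[string]string{"qwen:1.5b:q4": "https://modelscope.cn/..."}

	url, ok := pickURL("qwen:1.5b:q4", "https://www.google.com", primary, mirror)
	fmt.Println(url, ok)
}
```

Note that any failure of the probe, not only a blocked connection, selects the mirror; that matches the diff, where every error path of `isInChina` returns `true`.
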
@@ -148,6 +185,11 @@ func downloadModel(name string) (modelPath string, err error) {
// check if model exists in the cache directory. If exists, return the model path, otherwise,
// return the error.
func checkModelInCache(name string) (modelPath string, err error) {
+ name, err = resolveModelName(name)
+ if err != nil {
+ return
+ }
+
filename, ok := modelFilenames[name]
if !ok {
return "", ErrInvalidModelName
@@ -163,11 +205,6 @@ }
}

func getOrDownloadModel(name string) (modelPath string, err error) {
- name, err = resolveModelName(name)
- if err != nil {
- return
- }
-
modelPath, err = checkModelInCache(name)
if err == nil {
return
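
With `resolveModelName` moved into `checkModelInCache` and `downloadModel`, the top-level flow of `getOrDownloadModel` stays cache-first: return the cached package if it exists, otherwise download it. A self-contained sketch of that flow (the paths and helper names are illustrative, not taken from the repository):

```go
package main

import (
	"errors"
	"fmt"
	"os"
	"path/filepath"
)

// cachedPath returns the path of filename inside cacheDir if it exists.
func cachedPath(cacheDir, filename string) (string, error) {
	p := filepath.Join(cacheDir, filename)
	if _, err := os.Stat(p); err != nil {
		return "", errors.New("not cached: " + filename)
	}
	return p, nil
}

// getOrDownload mirrors the cache-first flow of getOrDownloadModel.
func getOrDownload(cacheDir, filename string, download func(string) (string, error)) (string, error) {
	if p, err := cachedPath(cacheDir, filename); err == nil {
		return p, nil
	}
	return download(filename)
}

func main() {
	fakeDownload := func(name string) (string, error) {
		fmt.Println("downloading", name)
		return filepath.Join(os.TempDir(), name), nil
	}
	p, err := getOrDownload(os.TempDir(), "qwen2-1.5b-instruct-q4.llmpkg", fakeDownload)
	fmt.Println(p, err)
}
```

Injecting the download step as a function keeps the sketch testable without network access.
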
