
Commit

fix(client): make llm chat service works with pydantic v2 (#3027)
jialeicui authored Nov 24, 2023
Parent: 67d6fee · Commit: 8dcf930
Showing 2 changed files with 6 additions and 7 deletions.
client/starwhale/api/_impl/service/service.py (1 addition, 1 deletion)

@@ -149,7 +149,7 @@ def _serve_builtin(
         app = FastAPI(title=title)

         @app.get("/api/spec")
-        def spec() -> ServiceSpec | None:
+        def spec() -> t.Union[ServiceSpec, None]:
             return self.get_spec()

         for _api in self.apis.values():
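Why this change: the PEP 604 `ServiceSpec | None` annotation cannot be evaluated at runtime on Python versions before 3.10 (classes only gained the `|` operator in 3.10), and FastAPI evaluates return annotations to build the response model, so the `t.Union[...]` spelling presumably keeps the endpoint working on older interpreters. A minimal sketch of the same pattern; the `ServiceSpec` model below is a hypothetical stand-in for starwhale's real class:

```python
import typing as t

from fastapi import FastAPI
from pydantic import BaseModel


class ServiceSpec(BaseModel):  # hypothetical stand-in for starwhale's ServiceSpec
    title: str


app = FastAPI(title="demo")


@app.get("/api/spec")
def spec() -> t.Union[ServiceSpec, None]:  # evaluates fine on Python 3.8+
    # `-> ServiceSpec | None` would raise TypeError here on Python < 3.10,
    # because the annotation is evaluated when the function is defined.
    return ServiceSpec(title="demo")
```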
client/starwhale/api/_impl/service/types/llm.py (5 additions, 6 deletions)

@@ -18,11 +18,10 @@ class Message:
 class Query(BaseModel):
     user_input: str
     history: List[Message]
-    confidence: Optional[float]
-    top_k: Optional[float]
-    top_p: Optional[float]
-    temperature: Optional[float]
-    max_new_tokens: Optional[int]
+    top_k: Optional[int] = None
+    top_p: Optional[float] = None
+    temperature: Optional[float] = None
+    max_new_tokens: Optional[int] = None


 class LLMChat(ServiceType):

@@ -32,7 +31,7 @@ class LLMChat(ServiceType):
     arg_types = {
         "user_input": str,
         "history": list,  # list of Message
-        "top_k": float,
+        "top_k": int,
         "top_p": float,
         "temperature": float,
         "max_new_tokens": int,
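Why the defaults are now explicit: pydantic v1 treated a bare `Optional[float]` annotation as implicitly optional with a default of `None`, while pydantic v2 treats it as a required field unless a default is given, so a chat request omitting it would fail validation. In the same pass, the `confidence` field, which has no counterpart in `arg_types`, is dropped, and `top_k` is corrected from `float` to `int`. A minimal sketch of the v2 behavior, using throwaway models rather than starwhale's actual `Query`:

```python
from typing import Optional

from pydantic import BaseModel, ValidationError  # pydantic v2


class WithDefault(BaseModel):
    user_input: str
    top_k: Optional[int] = None  # explicit default: field may be omitted


class WithoutDefault(BaseModel):
    user_input: str
    top_k: Optional[int]  # no default: required in pydantic v2


WithDefault(user_input="hi")  # ok, top_k is None

try:
    WithoutDefault(user_input="hi")
except ValidationError as e:
    print(e)  # pydantic v2 reports: top_k  Field required
```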
