Release 0.0.59
fern-api[bot] committed Aug 29, 2023 · 1 parent dff197a · commit 92da430
Showing 3 changed files with 12 additions and 2 deletions.
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "superagent-py"
-version = "v0.0.58"
+version = "v0.0.59"
 description = ""
 readme = "README.md"
 authors = []
2 changes: 1 addition & 1 deletion src/superagent/core/client_wrapper.py
@@ -14,7 +14,7 @@ def get_headers(self) -> typing.Dict[str, str]:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "superagent-py",
-            "X-Fern-SDK-Version": "v0.0.58",
+            "X-Fern-SDK-Version": "v0.0.59",
         }
         token = self._get_token()
         if token is not None:
10 changes: 10 additions & 0 deletions src/superagent/resources/agent/client.py
@@ -195,6 +195,7 @@ def prompt_agent(
         input: typing.Dict[str, typing.Any],
         has_streaming: typing.Optional[bool] = OMIT,
         session: typing.Optional[str] = OMIT,
+        cache_ttl: typing.Optional[int] = OMIT,
     ) -> PredictAgentOutput:
         """
         Invoke a specific agent
@@ -207,12 +208,16 @@ def prompt_agent(
             - has_streaming: typing.Optional[bool].
             - session: typing.Optional[str].
+            - cache_ttl: typing.Optional[int].
         """
         _request: typing.Dict[str, typing.Any] = {"input": input}
         if has_streaming is not OMIT:
             _request["has_streaming"] = has_streaming
         if session is not OMIT:
             _request["session"] = session
+        if cache_ttl is not OMIT:
+            _request["cache_ttl"] = cache_ttl
         _response = self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/predict"),
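For orientation before the async half of the diff, here is a minimal, hypothetical usage sketch of the new parameter. The `Superagent` client class, the import path, the `client.agent` resource layout, the token argument, the payload shape, and the reading of `cache_ttl` as seconds are assumptions about the generated SDK surface, not something this commit shows.

```python
# Hypothetical sketch: calling prompt_agent with the new cache_ttl parameter
# after upgrading to superagent-py v0.0.59. Client class, import path, auth
# argument, and payload shape are assumptions; adjust to the actual SDK surface.
from superagent.client import Superagent  # assumed import path

client = Superagent(token="YOUR_API_KEY")  # placeholder credentials

output = client.agent.prompt_agent(
    agent_id="agent_123",                              # placeholder agent id
    input={"input": "Summarize yesterday's metrics"},  # assumed payload shape
    has_streaming=False,
    session="session_abc",                             # optional conversation session
    cache_ttl=300,                                     # assumed seconds; omit to skip caching
)
print(output)
```

Because `cache_ttl` defaults to `OMIT`, it is only added to the request body when explicitly provided, so existing callers are unaffected.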
@@ -407,6 +412,7 @@ async def prompt_agent(
         input: typing.Dict[str, typing.Any],
         has_streaming: typing.Optional[bool] = OMIT,
         session: typing.Optional[str] = OMIT,
+        cache_ttl: typing.Optional[int] = OMIT,
     ) -> PredictAgentOutput:
         """
         Invoke a specific agent
@@ -419,12 +425,16 @@ async def prompt_agent(
             - has_streaming: typing.Optional[bool].
             - session: typing.Optional[str].
+            - cache_ttl: typing.Optional[int].
         """
         _request: typing.Dict[str, typing.Any] = {"input": input}
         if has_streaming is not OMIT:
             _request["has_streaming"] = has_streaming
         if session is not OMIT:
             _request["session"] = session
+        if cache_ttl is not OMIT:
+            _request["cache_ttl"] = cache_ttl
         _response = await self._client_wrapper.httpx_client.request(
             "POST",
             urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/predict"),
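The asynchronous client receives the same parameter; a matching sketch follows, again assuming an `AsyncSuperagent` class and the same resource layout as above.

```python
# Hypothetical async counterpart of the sketch above; AsyncSuperagent is an
# assumed class name for the generated async client.
import asyncio

from superagent.client import AsyncSuperagent  # assumed import path


async def main() -> None:
    client = AsyncSuperagent(token="YOUR_API_KEY")  # placeholder credentials
    output = await client.agent.prompt_agent(
        agent_id="agent_123",
        input={"input": "Summarize yesterday's metrics"},
        cache_ttl=300,  # defaults to OMIT, so it is only sent when set
    )
    print(output)


asyncio.run(main())
```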
