diff --git a/pyproject.toml b/pyproject.toml index 0126afb..fb4b820 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "superagent-py" -version = "v0.0.60" +version = "v0.0.99" description = "" readme = "README.md" authors = [] diff --git a/src/superagent/__init__.py b/src/superagent/__init__.py index f96ac8c..5ffa8b9 100644 --- a/src/superagent/__init__.py +++ b/src/superagent/__init__.py @@ -1,101 +1,62 @@ # This file was auto-generated by Fern from our API Definition. from .types import ( - AgentDocumentListOuput, - AgentDocumentOutput, - AgentListOutput, - AgentOutput, - AgentToolListOutput, - AgentToolOutput, - AgentType, - ApiTokenListOutput, - ApiTokenOutput, - AppLibModelsResponseAgent, - AppLibModelsResponseAgentDocument, - AppLibModelsResponseAgentTool, - AppLibModelsResponseApiToken, - AppLibModelsResponseDocument, - AppLibModelsResponsePrompt, - AppLibModelsResponseTag, - AppLibModelsResponseTool, - DocumentListOutput, - DocumentOuput, - DocumentType, + AgentDatasosurceList, + AgentList, + AgentRunList, + AgentToolList, + ApiUser, + AppModelsRequestAgent, + AppModelsRequestDatasource, + AppModelsRequestLlm, + AppModelsRequestTool, + AppModelsRequestWorkflow, + AppModelsResponseAgent, + AppModelsResponseAgentInvoke, + AppModelsResponseDatasource, + AppModelsResponseLlm, + AppModelsResponseTool, + AppModelsResponseWorkflow, + DatasourceList, HttpValidationError, - PredictAgentOutput, - PromptListOutput, - PromptOutput, - SignInOutput, - TagListOutput, - TagOutput, - ToolListOutput, - ToolOutput, - ToolType, - User, - UserOutput, + LlmList, + ToolList, ValidationError, ValidationErrorLocItem, + WorkflowList, ) from .errors import UnprocessableEntityError -from .resources import ( - agent, - agent_documents, - agent_tools, - api_token, - auth, - documents, - prompts, - tags, - tools, - traces, - user, -) +from .resources import agent, api_user, datasource, llm, tool, workflow __all__ = [ - "AgentDocumentListOuput", - "AgentDocumentOutput", - "AgentListOutput", - "AgentOutput", - "AgentToolListOutput", - "AgentToolOutput", - "AgentType", - "ApiTokenListOutput", - "ApiTokenOutput", - "AppLibModelsResponseAgent", - "AppLibModelsResponseAgentDocument", - "AppLibModelsResponseAgentTool", - "AppLibModelsResponseApiToken", - "AppLibModelsResponseDocument", - "AppLibModelsResponsePrompt", - "AppLibModelsResponseTag", - "AppLibModelsResponseTool", - "DocumentListOutput", - "DocumentOuput", - "DocumentType", + "AgentDatasosurceList", + "AgentList", + "AgentRunList", + "AgentToolList", + "ApiUser", + "AppModelsRequestAgent", + "AppModelsRequestDatasource", + "AppModelsRequestLlm", + "AppModelsRequestTool", + "AppModelsRequestWorkflow", + "AppModelsResponseAgent", + "AppModelsResponseAgentInvoke", + "AppModelsResponseDatasource", + "AppModelsResponseLlm", + "AppModelsResponseTool", + "AppModelsResponseWorkflow", + "DatasourceList", "HttpValidationError", - "PredictAgentOutput", - "PromptListOutput", - "PromptOutput", - "SignInOutput", - "TagListOutput", - "TagOutput", - "ToolListOutput", - "ToolOutput", - "ToolType", + "LlmList", + "ToolList", "UnprocessableEntityError", - "User", - "UserOutput", "ValidationError", "ValidationErrorLocItem", + "WorkflowList", "agent", - "agent_documents", - "agent_tools", - "api_token", - "auth", - "documents", - "prompts", - "tags", - "tools", - "traces", - "user", + "api_user", + "datasource", + "llm", + "tool", + "workflow", ] diff --git a/src/superagent/client.py b/src/superagent/client.py index cb4627a..bdd83e4 100644 --- 
a/src/superagent/client.py +++ b/src/superagent/client.py @@ -6,16 +6,11 @@ from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from .resources.agent.client import AgentClient, AsyncAgentClient -from .resources.agent_documents.client import AgentDocumentsClient, AsyncAgentDocumentsClient -from .resources.agent_tools.client import AgentToolsClient, AsyncAgentToolsClient -from .resources.api_token.client import ApiTokenClient, AsyncApiTokenClient -from .resources.auth.client import AsyncAuthClient, AuthClient -from .resources.documents.client import AsyncDocumentsClient, DocumentsClient -from .resources.prompts.client import AsyncPromptsClient, PromptsClient -from .resources.tags.client import AsyncTagsClient, TagsClient -from .resources.tools.client import AsyncToolsClient, ToolsClient -from .resources.traces.client import AsyncTracesClient, TracesClient -from .resources.user.client import AsyncUserClient, UserClient +from .resources.api_user.client import ApiUserClient, AsyncApiUserClient +from .resources.datasource.client import AsyncDatasourceClient, DatasourceClient +from .resources.llm.client import AsyncLlmClient, LlmClient +from .resources.tool.client import AsyncToolClient, ToolClient +from .resources.workflow.client import AsyncWorkflowClient, WorkflowClient class Superagent: @@ -30,16 +25,11 @@ def __init__( base_url=base_url, token=token, httpx_client=httpx.Client(timeout=timeout) ) self.agent = AgentClient(client_wrapper=self._client_wrapper) - self.agent_documents = AgentDocumentsClient(client_wrapper=self._client_wrapper) - self.tags = TagsClient(client_wrapper=self._client_wrapper) - self.agent_tools = AgentToolsClient(client_wrapper=self._client_wrapper) - self.auth = AuthClient(client_wrapper=self._client_wrapper) - self.user = UserClient(client_wrapper=self._client_wrapper) - self.api_token = ApiTokenClient(client_wrapper=self._client_wrapper) - self.documents = DocumentsClient(client_wrapper=self._client_wrapper) - self.prompts = PromptsClient(client_wrapper=self._client_wrapper) - self.tools = ToolsClient(client_wrapper=self._client_wrapper) - self.traces = TracesClient(client_wrapper=self._client_wrapper) + self.llm = LlmClient(client_wrapper=self._client_wrapper) + self.api_user = ApiUserClient(client_wrapper=self._client_wrapper) + self.datasource = DatasourceClient(client_wrapper=self._client_wrapper) + self.tool = ToolClient(client_wrapper=self._client_wrapper) + self.workflow = WorkflowClient(client_wrapper=self._client_wrapper) class AsyncSuperagent: @@ -54,13 +44,8 @@ def __init__( base_url=base_url, token=token, httpx_client=httpx.AsyncClient(timeout=timeout) ) self.agent = AsyncAgentClient(client_wrapper=self._client_wrapper) - self.agent_documents = AsyncAgentDocumentsClient(client_wrapper=self._client_wrapper) - self.tags = AsyncTagsClient(client_wrapper=self._client_wrapper) - self.agent_tools = AsyncAgentToolsClient(client_wrapper=self._client_wrapper) - self.auth = AsyncAuthClient(client_wrapper=self._client_wrapper) - self.user = AsyncUserClient(client_wrapper=self._client_wrapper) - self.api_token = AsyncApiTokenClient(client_wrapper=self._client_wrapper) - self.documents = AsyncDocumentsClient(client_wrapper=self._client_wrapper) - self.prompts = AsyncPromptsClient(client_wrapper=self._client_wrapper) - self.tools = AsyncToolsClient(client_wrapper=self._client_wrapper) - self.traces = AsyncTracesClient(client_wrapper=self._client_wrapper) + self.llm = AsyncLlmClient(client_wrapper=self._client_wrapper) + self.api_user = 
AsyncApiUserClient(client_wrapper=self._client_wrapper) + self.datasource = AsyncDatasourceClient(client_wrapper=self._client_wrapper) + self.tool = AsyncToolClient(client_wrapper=self._client_wrapper) + self.workflow = AsyncWorkflowClient(client_wrapper=self._client_wrapper) diff --git a/src/superagent/core/client_wrapper.py b/src/superagent/core/client_wrapper.py index 4be8fa2..c716db4 100644 --- a/src/superagent/core/client_wrapper.py +++ b/src/superagent/core/client_wrapper.py @@ -14,7 +14,7 @@ def get_headers(self) -> typing.Dict[str, str]: headers: typing.Dict[str, str] = { "X-Fern-Language": "Python", "X-Fern-SDK-Name": "superagent-py", - "X-Fern-SDK-Version": "v0.0.60", + "X-Fern-SDK-Version": "v0.0.99", } token = self._get_token() if token is not None: diff --git a/src/superagent/resources/__init__.py b/src/superagent/resources/__init__.py index f86812f..1d175ae 100644 --- a/src/superagent/resources/__init__.py +++ b/src/superagent/resources/__init__.py @@ -1,17 +1,5 @@ # This file was auto-generated by Fern from our API Definition. -from . import agent, agent_documents, agent_tools, api_token, auth, documents, prompts, tags, tools, traces, user +from . import agent, api_user, datasource, llm, tool, workflow -__all__ = [ - "agent", - "agent_documents", - "agent_tools", - "api_token", - "auth", - "documents", - "prompts", - "tags", - "tools", - "traces", - "user", -] +__all__ = ["agent", "api_user", "datasource", "llm", "tool", "workflow"] diff --git a/src/superagent/resources/agent/client.py b/src/superagent/resources/agent/client.py index 76bd22f..366d5a1 100644 --- a/src/superagent/resources/agent/client.py +++ b/src/superagent/resources/agent/client.py @@ -10,10 +10,14 @@ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.agent_list_output import AgentListOutput -from ...types.agent_output import AgentOutput +from ...types.agent_datasosurce_list import AgentDatasosurceList +from ...types.agent_list import AgentList +from ...types.agent_run_list import AgentRunList +from ...types.agent_tool_list import AgentToolList +from ...types.app_models_request_agent import AppModelsRequestAgent +from ...types.app_models_response_agent import AppModelsResponseAgent +from ...types.app_models_response_agent_invoke import AppModelsResponseAgentInvoke from ...types.http_validation_error import HttpValidationError -from ...types.predict_agent_output import PredictAgentOutput # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) 
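# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the generated diff): how the renamed
# AgentClient methods in the hunks below are called through the new client
# surface. Method names and signatures are taken from this diff; the base URL,
# API key, agent id, and tool id values are placeholder assumptions, and
# whether base_url/token have defaults is not shown in these hunks.
from superagent.client import Superagent

client = Superagent(
    base_url="https://api.example.com",  # placeholder, assumed required
    token="YOUR_API_KEY",                # placeholder bearer token
)

agents = client.agent.list()                       # GET api/v1/agents -> AgentList
agent = client.agent.get(agent_id="agent_123")     # GET api/v1/agents/{agent_id}
client.agent.add_tool(agent_id="agent_123", tool_id="tool_456")  # POST .../tools
result = client.agent.invoke(
    agent_id="agent_123",
    input="Hello!",
    enable_streaming=False,  # required; session_id is optional and omitted here
)  # POST api/v1/agents/{agent_id}/invoke -> AppModelsResponseAgentInvoke
# ---------------------------------------------------------------------------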
@@ -23,7 +27,7 @@ class AgentClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def list_all_agents(self) -> AgentListOutput: + def list(self) -> AgentList: """ List all agents """ @@ -34,62 +38,136 @@ def list_all_agents(self) -> AgentListOutput: timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AgentList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def create_agent( - self, - *, - name: str, - type: str, - description: typing.Optional[str] = OMIT, - avatar_url: typing.Optional[str] = OMIT, - llm: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - has_memory: typing.Optional[bool] = OMIT, - prompt_id: typing.Optional[str] = OMIT, - ) -> AgentOutput: + def create(self, *, request: AppModelsRequestAgent) -> AppModelsResponseAgent: """ Create a new agent Parameters: - - name: str. + - request: AppModelsRequestAgent. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) - - type: str. + def get(self, agent_id: str) -> AppModelsResponseAgent: + """ + Get a single agent - - description: typing.Optional[str]. + Parameters: + - agent_id: str. + """ + _response = self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) - - avatar_url: typing.Optional[str]. + def update(self, agent_id: str, *, request: AppModelsRequestAgent) -> AppModelsResponseAgent: + """ + Patch an agent - - llm: typing.Optional[typing.Dict[str, typing.Any]]. + Parameters: + - agent_id: str. - - has_memory: typing.Optional[bool]. + - request: AppModelsRequestAgent. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "PATCH", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, agent_id: str) -> typing.Any: + """ + Delete an agent - - prompt_id: typing.Optional[str]. + Parameters: + - agent_id: str. """ - _request: typing.Dict[str, typing.Any] = {"name": name, "type": type} - if description is not OMIT: - _request["description"] = description - if avatar_url is not OMIT: - _request["avatarUrl"] = avatar_url - if llm is not OMIT: - _request["llm"] = llm - if has_memory is not OMIT: - _request["hasMemory"] = has_memory - if prompt_id is not OMIT: - _request["promptId"] = prompt_id + _response = self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def invoke( + self, agent_id: str, *, input: str, session_id: typing.Optional[str] = OMIT, enable_streaming: bool + ) -> AppModelsResponseAgentInvoke: + """ + Invoke an agent + + Parameters: + - agent_id: str. + + - input: str. + + - session_id: typing.Optional[str]. + + - enable_streaming: bool. 
+ """ + _request: typing.Dict[str, typing.Any] = {"input": input, "enableStreaming": enable_streaming} + if session_id is not OMIT: + _request["sessionId"] = session_id _response = self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/invoke"), json=jsonable_encoder(_request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseAgentInvoke, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -98,39 +176,72 @@ def create_agent( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def list_library_agents(self) -> AgentListOutput: + def add_llm(self, agent_id: str, *, llm_id: str) -> AppModelsResponseAgent: """ - List all library agents + Add LLM to agent + + Parameters: + - agent_id: str. + + - llm_id: str. """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents/library"), + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/llms"), + json=jsonable_encoder({"llmId": llm_id}), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def remove_llm(self, agent_id: str, llm_id: str) -> typing.Any: + """ + Remove LLM from agent + + Parameters: + - agent_id: str. + + - llm_id: str. + """ + _response = self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/llms/{llm_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get_agent(self, agent_id: str) -> AgentOutput: + def list_tools(self, agent_id: str) -> AgentToolList: """ - Get a specific agent + List agent tools Parameters: - agent_id: str. 
""" _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/tools"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AgentToolList, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -139,24 +250,24 @@ def get_agent(self, agent_id: str) -> AgentOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def patch_agent(self, agent_id: str, *, request: typing.Dict[str, typing.Any]) -> AgentOutput: + def add_tool(self, agent_id: str, *, tool_id: str) -> AppModelsResponseAgent: """ - Patch a specific agent + Add tool to agent Parameters: - agent_id: str. - - request: typing.Dict[str, typing.Any]. + - tool_id: str. """ _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), - json=jsonable_encoder(request), + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/tools"), + json=jsonable_encoder({"toolId": tool_id}), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -165,21 +276,25 @@ def patch_agent(self, agent_id: str, *, request: typing.Dict[str, typing.Any]) - raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def delete_agent(self, agent_id: str) -> AgentOutput: + def remove_tool(self, agent_id: str, tool_id: str) -> typing.Any: """ - Delete a specific agent + Remove tool from agent Parameters: - agent_id: str. + + - tool_id: str. 
""" _response = self._client_wrapper.httpx_client.request( "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/tools/{tool_id}" + ), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -188,45 +303,97 @@ def delete_agent(self, agent_id: str) -> AgentOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def prompt_agent( - self, - agent_id: str, - *, - input: typing.Dict[str, typing.Any], - has_streaming: typing.Optional[bool] = OMIT, - session: typing.Optional[str] = OMIT, - cache_ttl: typing.Optional[int] = OMIT, - ) -> PredictAgentOutput: + def list_datasources(self, agent_id: str) -> AgentDatasosurceList: """ - Invoke a specific agent + List agent datasources Parameters: - agent_id: str. + """ + _response = self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/datasources"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AgentDatasosurceList, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) - - input: typing.Dict[str, typing.Any]. - - - has_streaming: typing.Optional[bool]. + def add_datasource(self, agent_id: str, *, datasource_id: str) -> AppModelsResponseAgent: + """ + Add datasource to agent - - session: typing.Optional[str]. + Parameters: + - agent_id: str. - - cache_ttl: typing.Optional[int]. + - datasource_id: str. 
""" - _request: typing.Dict[str, typing.Any] = {"input": input} - if has_streaming is not OMIT: - _request["has_streaming"] = has_streaming - if session is not OMIT: - _request["session"] = session - if cache_ttl is not OMIT: - _request["cache_ttl"] = cache_ttl _response = self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/predict"), - json=jsonable_encoder(_request), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/datasources"), + json=jsonable_encoder({"datasourceId": datasource_id}), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def remove_datasource(self, agent_id: str, datasource_id: str) -> typing.Any: + """ + Remove datasource from agent + + Parameters: + - agent_id: str. + + - datasource_id: str. + """ + _response = self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/datasources/{datasource_id}" + ), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def list_runs(self, agent_id: str) -> AgentRunList: + """ + List agent runs + + Parameters: + - agent_id: str. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/runs"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PredictAgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AgentRunList, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -240,7 +407,7 @@ class AsyncAgentClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper - async def list_all_agents(self) -> AgentListOutput: + async def list(self) -> AgentList: """ List all agents """ @@ -251,62 +418,136 @@ async def list_all_agents(self) -> AgentListOutput: timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AgentList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def create_agent( - self, - *, - name: str, - type: str, - description: typing.Optional[str] = OMIT, - avatar_url: typing.Optional[str] = OMIT, - llm: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - has_memory: typing.Optional[bool] = OMIT, - prompt_id: typing.Optional[str] = OMIT, - ) -> AgentOutput: + async def create(self, *, request: AppModelsRequestAgent) -> AppModelsResponseAgent: """ Create a new agent Parameters: - - name: str. + - request: AppModelsRequestAgent. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) - - type: str. + async def get(self, agent_id: str) -> AppModelsResponseAgent: + """ + Get a single agent - - description: typing.Optional[str]. + Parameters: + - agent_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) - - avatar_url: typing.Optional[str]. 
+ async def update(self, agent_id: str, *, request: AppModelsRequestAgent) -> AppModelsResponseAgent: + """ + Patch an agent - - llm: typing.Optional[typing.Dict[str, typing.Any]]. + Parameters: + - agent_id: str. - - has_memory: typing.Optional[bool]. + - request: AppModelsRequestAgent. + """ + _response = await self._client_wrapper.httpx_client.request( + "PATCH", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, agent_id: str) -> typing.Any: + """ + Delete an agent - - prompt_id: typing.Optional[str]. + Parameters: + - agent_id: str. """ - _request: typing.Dict[str, typing.Any] = {"name": name, "type": type} - if description is not OMIT: - _request["description"] = description - if avatar_url is not OMIT: - _request["avatarUrl"] = avatar_url - if llm is not OMIT: - _request["llm"] = llm - if has_memory is not OMIT: - _request["hasMemory"] = has_memory - if prompt_id is not OMIT: - _request["promptId"] = prompt_id + _response = await self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def invoke( + self, agent_id: str, *, input: str, session_id: typing.Optional[str] = OMIT, enable_streaming: bool + ) -> AppModelsResponseAgentInvoke: + """ + Invoke an agent + + Parameters: + - agent_id: str. + + - input: str. + + - session_id: typing.Optional[str]. + + - enable_streaming: bool. 
+ """ + _request: typing.Dict[str, typing.Any] = {"input": input, "enableStreaming": enable_streaming} + if session_id is not OMIT: + _request["sessionId"] = session_id _response = await self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/invoke"), json=jsonable_encoder(_request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseAgentInvoke, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -315,39 +556,72 @@ async def create_agent( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def list_library_agents(self) -> AgentListOutput: + async def add_llm(self, agent_id: str, *, llm_id: str) -> AppModelsResponseAgent: """ - List all library agents + Add LLM to agent + + Parameters: + - agent_id: str. + + - llm_id: str. """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agents/library"), + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/llms"), + json=jsonable_encoder({"llmId": llm_id}), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def remove_llm(self, agent_id: str, llm_id: str) -> typing.Any: + """ + Remove LLM from agent + + Parameters: + - agent_id: str. + + - llm_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/llms/{llm_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def get_agent(self, agent_id: str) -> AgentOutput: + async def list_tools(self, agent_id: str) -> AgentToolList: """ - Get a specific agent + List agent tools Parameters: - agent_id: str. 
""" _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/tools"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AgentToolList, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -356,24 +630,24 @@ async def get_agent(self, agent_id: str) -> AgentOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def patch_agent(self, agent_id: str, *, request: typing.Dict[str, typing.Any]) -> AgentOutput: + async def add_tool(self, agent_id: str, *, tool_id: str) -> AppModelsResponseAgent: """ - Patch a specific agent + Add tool to agent Parameters: - agent_id: str. - - request: typing.Dict[str, typing.Any]. + - tool_id: str. """ _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), - json=jsonable_encoder(request), + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/tools"), + json=jsonable_encoder({"toolId": tool_id}), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -382,21 +656,25 @@ async def patch_agent(self, agent_id: str, *, request: typing.Dict[str, typing.A raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def delete_agent(self, agent_id: str) -> AgentOutput: + async def remove_tool(self, agent_id: str, tool_id: str) -> typing.Any: """ - Delete a specific agent + Remove tool from agent Parameters: - agent_id: str. + + - tool_id: str. 
""" _response = await self._client_wrapper.httpx_client.request( "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}"), + urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/tools/{tool_id}" + ), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -405,45 +683,97 @@ async def delete_agent(self, agent_id: str) -> AgentOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def prompt_agent( - self, - agent_id: str, - *, - input: typing.Dict[str, typing.Any], - has_streaming: typing.Optional[bool] = OMIT, - session: typing.Optional[str] = OMIT, - cache_ttl: typing.Optional[int] = OMIT, - ) -> PredictAgentOutput: + async def list_datasources(self, agent_id: str) -> AgentDatasosurceList: """ - Invoke a specific agent + List agent datasources Parameters: - agent_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/datasources"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AgentDatasosurceList, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) - - input: typing.Dict[str, typing.Any]. - - - has_streaming: typing.Optional[bool]. + async def add_datasource(self, agent_id: str, *, datasource_id: str) -> AppModelsResponseAgent: + """ + Add datasource to agent - - session: typing.Optional[str]. + Parameters: + - agent_id: str. - - cache_ttl: typing.Optional[int]. + - datasource_id: str. 
""" - _request: typing.Dict[str, typing.Any] = {"input": input} - if has_streaming is not OMIT: - _request["has_streaming"] = has_streaming - if session is not OMIT: - _request["session"] = session - if cache_ttl is not OMIT: - _request["cache_ttl"] = cache_ttl _response = await self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/predict"), - json=jsonable_encoder(_request), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/datasources"), + json=jsonable_encoder({"datasourceId": datasource_id}), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseAgent, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def remove_datasource(self, agent_id: str, datasource_id: str) -> typing.Any: + """ + Remove datasource from agent + + Parameters: + - agent_id: str. + + - datasource_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/datasources/{datasource_id}" + ), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def list_runs(self, agent_id: str) -> AgentRunList: + """ + List agent runs + + Parameters: + - agent_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agents/{agent_id}/runs"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PredictAgentOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AgentRunList, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: diff --git a/src/superagent/resources/agent_documents/client.py b/src/superagent/resources/agent_documents/client.py deleted file mode 100644 index 8cb2b35..0000000 --- a/src/superagent/resources/agent_documents/client.py +++ /dev/null @@ -1,229 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -import urllib.parse -from json.decoder import JSONDecodeError - -import pydantic - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.remove_none_from_dict import remove_none_from_dict -from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.agent_document_list_ouput import AgentDocumentListOuput -from ...types.agent_document_output import AgentDocumentOutput -from ...types.http_validation_error import HttpValidationError - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class AgentDocumentsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list_agent_documents(self, *, expand: typing.Optional[bool] = None) -> AgentDocumentListOuput: - """ - List all agent documents - - Parameters: - - expand: typing.Optional[bool]. - """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-documents"), - params=remove_none_from_dict({"expand": expand}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentListOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def create_agent_document(self, *, agent_id: str, document_id: str) -> AgentDocumentOutput: - """ - Create a agent document - - Parameters: - - agent_id: str. - - - document_id: str. - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-documents"), - json=jsonable_encoder({"agentId": agent_id, "documentId": document_id}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def get_agent_document(self, agent_document_id: str) -> AgentDocumentOutput: - """ - Get a specific agent document - - Parameters: - - agent_document_id: str. 
- """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( - f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-documents/{agent_document_id}" - ), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def delete_agent_document(self, agent_document_id: str) -> AgentDocumentOutput: - """ - Delete a specific agent document - - Parameters: - - agent_document_id: str. - """ - _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( - f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-documents/{agent_document_id}" - ), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - -class AsyncAgentDocumentsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list_agent_documents(self, *, expand: typing.Optional[bool] = None) -> AgentDocumentListOuput: - """ - List all agent documents - - Parameters: - - expand: typing.Optional[bool]. - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-documents"), - params=remove_none_from_dict({"expand": expand}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentListOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def create_agent_document(self, *, agent_id: str, document_id: str) -> AgentDocumentOutput: - """ - Create a agent document - - Parameters: - - agent_id: str. - - - document_id: str. 
- """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-documents"), - json=jsonable_encoder({"agentId": agent_id, "documentId": document_id}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def get_agent_document(self, agent_document_id: str) -> AgentDocumentOutput: - """ - Get a specific agent document - - Parameters: - - agent_document_id: str. - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin( - f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-documents/{agent_document_id}" - ), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def delete_agent_document(self, agent_document_id: str) -> AgentDocumentOutput: - """ - Delete a specific agent document - - Parameters: - - agent_document_id: str. - """ - _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin( - f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-documents/{agent_document_id}" - ), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentDocumentOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/superagent/resources/agent_tools/client.py b/src/superagent/resources/agent_tools/client.py deleted file mode 100644 index c99ef91..0000000 --- a/src/superagent/resources/agent_tools/client.py +++ /dev/null @@ -1,221 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import typing -import urllib.parse -from json.decoder import JSONDecodeError - -import pydantic - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...core.remove_none_from_dict import remove_none_from_dict -from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.agent_tool_list_output import AgentToolListOutput -from ...types.agent_tool_output import AgentToolOutput -from ...types.http_validation_error import HttpValidationError - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class AgentToolsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list_agent_tools(self, *, expand: typing.Optional[bool] = None) -> AgentToolListOutput: - """ - List all agent tools - - Parameters: - - expand: typing.Optional[bool]. - """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-tools"), - params=remove_none_from_dict({"expand": expand}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolListOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def create_agent_tool(self, *, agent_id: str, tool_id: str) -> AgentToolOutput: - """ - Create a agent tool - - Parameters: - - agent_id: str. - - - tool_id: str. - """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-tools"), - json=jsonable_encoder({"agentId": agent_id, "toolId": tool_id}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def get_agent_tool(self, agent_tool_id: str) -> AgentToolOutput: - """ - Get a specific agent tool - - Parameters: - - agent_tool_id: str. 
- """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-tools/{agent_tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def delete_agent_tool(self, agent_tool_id: str) -> AgentToolOutput: - """ - Delete a specific agent tool - - Parameters: - - agent_tool_id: str. - """ - _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-tools/{agent_tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - -class AsyncAgentToolsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list_agent_tools(self, *, expand: typing.Optional[bool] = None) -> AgentToolListOutput: - """ - List all agent tools - - Parameters: - - expand: typing.Optional[bool]. - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-tools"), - params=remove_none_from_dict({"expand": expand}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolListOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def create_agent_tool(self, *, agent_id: str, tool_id: str) -> AgentToolOutput: - """ - Create a agent tool - - Parameters: - - agent_id: str. - - - tool_id: str. 
- """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/agent-tools"), - json=jsonable_encoder({"agentId": agent_id, "toolId": tool_id}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def get_agent_tool(self, agent_tool_id: str) -> AgentToolOutput: - """ - Get a specific agent tool - - Parameters: - - agent_tool_id: str. - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-tools/{agent_tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def delete_agent_tool(self, agent_tool_id: str) -> AgentToolOutput: - """ - Delete a specific agent tool - - Parameters: - - agent_tool_id: str. - """ - _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/agent-tools/{agent_tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(AgentToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/superagent/resources/agent_documents/__init__.py b/src/superagent/resources/api_user/__init__.py similarity index 100% rename from src/superagent/resources/agent_documents/__init__.py rename to src/superagent/resources/api_user/__init__.py diff --git a/src/superagent/resources/user/client.py b/src/superagent/resources/api_user/client.py similarity index 53% rename from src/superagent/resources/user/client.py rename to src/superagent/resources/api_user/client.py index 411211f..0941e43 100644 --- a/src/superagent/resources/user/client.py +++ b/src/superagent/resources/api_user/client.py @@ -1,5 +1,6 @@ # This file was auto-generated by Fern from our API Definition. 
+import typing import urllib.parse from json.decoder import JSONDecodeError @@ -7,45 +8,61 @@ from ...core.api_error import ApiError from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.http_validation_error import HttpValidationError -from ...types.user_output import UserOutput +from ...types.api_user import ApiUser -class UserClient: +class ApiUserClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def read_user_me(self) -> UserOutput: + def create(self) -> ApiUser: + """ + Create a new API user + """ _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/users/me"), + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(UserOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(ApiUser, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def read_user(self, user_id: str) -> UserOutput: + def get(self) -> ApiUser: """ - Parameters: - - user_id: str. + Get a single api user """ _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/users/{user_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(ApiUser, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self) -> typing.Any: + """ + Delete an api user + """ + _response = self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(UserOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: @@ -53,40 +70,58 @@ def read_user(self, user_id: str) -> UserOutput: raise ApiError(status_code=_response.status_code, body=_response_json) -class AsyncUserClient: +class AsyncApiUserClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper - async def read_user_me(self) -> UserOutput: + async def create(self) -> ApiUser: + """ + Create a new API user + """ _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/users/me"), + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users"), headers=self._client_wrapper.get_headers(), timeout=60, 
) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(UserOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(ApiUser, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def read_user(self, user_id: str) -> UserOutput: + async def get(self) -> ApiUser: """ - Parameters: - - user_id: str. + Get a single api user """ _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/users/{user_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(ApiUser, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self) -> typing.Any: + """ + Delete an api user + """ + _response = await self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-users/me"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(UserOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: diff --git a/src/superagent/resources/auth/client.py b/src/superagent/resources/auth/client.py deleted file mode 100644 index 5b4467f..0000000 --- a/src/superagent/resources/auth/client.py +++ /dev/null @@ -1,225 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -import urllib.parse -from json.decoder import JSONDecodeError - -import pydantic - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.http_validation_error import HttpValidationError -from ...types.sign_in_output import SignInOutput - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class AuthClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def sign_in(self, *, email: str, password: str) -> SignInOutput: - """ - Parameters: - - email: str. - - - password: str. 
- """ - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/auth/sign-in"), - json=jsonable_encoder({"email": email, "password": password}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(SignInOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def sign_up( - self, - *, - email: str, - password: str, - name: typing.Optional[str] = OMIT, - metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - ) -> SignInOutput: - """ - Parameters: - - email: str. - - - password: str. - - - name: typing.Optional[str]. - - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. - """ - _request: typing.Dict[str, typing.Any] = {"email": email, "password": password} - if name is not OMIT: - _request["name"] = name - if metadata is not OMIT: - _request["metadata"] = metadata - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/auth/sign-up"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(SignInOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def oauth_handler( - self, *, email: str, name: str, access_token: typing.Optional[str] = OMIT, provider: typing.Optional[str] = OMIT - ) -> typing.Any: - """ - Parameters: - - email: str. - - - name: str. - - - access_token: typing.Optional[str]. - - - provider: typing.Optional[str]. - """ - _request: typing.Dict[str, typing.Any] = {"email": email, "name": name} - if access_token is not OMIT: - _request["access_token"] = access_token - if provider is not OMIT: - _request["provider"] = provider - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/auth/oauth/callback"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - -class AsyncAuthClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def sign_in(self, *, email: str, password: str) -> SignInOutput: - """ - Parameters: - - email: str. - - - password: str. 
- """ - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/auth/sign-in"), - json=jsonable_encoder({"email": email, "password": password}), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(SignInOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def sign_up( - self, - *, - email: str, - password: str, - name: typing.Optional[str] = OMIT, - metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - ) -> SignInOutput: - """ - Parameters: - - email: str. - - - password: str. - - - name: typing.Optional[str]. - - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. - """ - _request: typing.Dict[str, typing.Any] = {"email": email, "password": password} - if name is not OMIT: - _request["name"] = name - if metadata is not OMIT: - _request["metadata"] = metadata - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/auth/sign-up"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(SignInOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def oauth_handler( - self, *, email: str, name: str, access_token: typing.Optional[str] = OMIT, provider: typing.Optional[str] = OMIT - ) -> typing.Any: - """ - Parameters: - - email: str. - - - name: str. - - - access_token: typing.Optional[str]. - - - provider: typing.Optional[str]. 
- """ - _request: typing.Dict[str, typing.Any] = {"email": email, "name": name} - if access_token is not OMIT: - _request["access_token"] = access_token - if provider is not OMIT: - _request["provider"] = provider - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/auth/oauth/callback"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/superagent/resources/agent_tools/__init__.py b/src/superagent/resources/datasource/__init__.py similarity index 100% rename from src/superagent/resources/agent_tools/__init__.py rename to src/superagent/resources/datasource/__init__.py diff --git a/src/superagent/resources/prompts/client.py b/src/superagent/resources/datasource/client.py similarity index 70% rename from src/superagent/resources/prompts/client.py rename to src/superagent/resources/datasource/client.py index beb4115..6aa658f 100644 --- a/src/superagent/resources/prompts/client.py +++ b/src/superagent/resources/datasource/client.py @@ -10,56 +10,53 @@ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder from ...errors.unprocessable_entity_error import UnprocessableEntityError +from ...types.app_models_request_datasource import AppModelsRequestDatasource +from ...types.app_models_response_datasource import AppModelsResponseDatasource +from ...types.datasource_list import DatasourceList from ...types.http_validation_error import HttpValidationError -from ...types.prompt_list_output import PromptListOutput -from ...types.prompt_output import PromptOutput # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) -class PromptsClient: +class DatasourceClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def list_prompts(self) -> PromptListOutput: + def list(self) -> DatasourceList: """ - List all prompts + List all datasources """ _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/prompts"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(DatasourceList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def create_a_prompt(self, *, name: str, input_variables: typing.List[typing.Any], template: str) -> PromptOutput: + def create(self, *, request: AppModelsRequestDatasource) -> AppModelsResponseDatasource: """ - Create a new prompt + Create a new datasource Parameters: - - name: str. 
- - - input_variables: typing.List[typing.Any]. - - - template: str. + - request: AppModelsRequestDatasource. """ _response = self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/prompts"), - json=jsonable_encoder({"name": name, "input_variables": input_variables, "template": template}), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -68,21 +65,21 @@ def create_a_prompt(self, *, name: str, input_variables: typing.List[typing.Any] raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get_prompt(self, prompt_id: str) -> PromptOutput: + def get(self, datasource_id: str) -> AppModelsResponseDatasource: """ - Get a specific prompt + Get a specific datasource Parameters: - - prompt_id: str. + - datasource_id: str. """ _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/prompts/{prompt_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{datasource_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -91,24 +88,24 @@ def get_prompt(self, prompt_id: str) -> PromptOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def patch_prompt(self, prompt_id: str, *, request: typing.Dict[str, typing.Any]) -> PromptOutput: + def update(self, datasource_id: str, *, request: AppModelsRequestDatasource) -> AppModelsResponseDatasource: """ - Patch a specific prompt + Update a specific datasource Parameters: - - prompt_id: str. + - datasource_id: str. - - request: typing.Dict[str, typing.Any]. + - request: AppModelsRequestDatasource. 
""" _response = self._client_wrapper.httpx_client.request( "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/prompts/{prompt_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{datasource_id}"), json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -117,21 +114,21 @@ def patch_prompt(self, prompt_id: str, *, request: typing.Dict[str, typing.Any]) raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def delete_prompt(self, prompt_id: str) -> PromptOutput: + def delete(self, datasource_id: str) -> typing.Any: """ - Delete a specific prompt + Delete a specific datasource Parameters: - - prompt_id: str. + - datasource_id: str. """ _response = self._client_wrapper.httpx_client.request( "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/prompts/{prompt_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{datasource_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -141,50 +138,44 @@ def delete_prompt(self, prompt_id: str) -> PromptOutput: raise ApiError(status_code=_response.status_code, body=_response_json) -class AsyncPromptsClient: +class AsyncDatasourceClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper - async def list_prompts(self) -> PromptListOutput: + async def list(self) -> DatasourceList: """ - List all prompts + List all datasources """ _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/prompts"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(DatasourceList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def create_a_prompt( - self, *, name: str, input_variables: typing.List[typing.Any], template: str - ) -> PromptOutput: + async def create(self, *, request: AppModelsRequestDatasource) -> AppModelsResponseDatasource: """ - Create a new prompt + Create a new datasource Parameters: - - name: str. - - - input_variables: typing.List[typing.Any]. - - - template: str. + - request: AppModelsRequestDatasource. 
""" _response = await self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/prompts"), - json=jsonable_encoder({"name": name, "input_variables": input_variables, "template": template}), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/datasources"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -193,21 +184,21 @@ async def create_a_prompt( raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def get_prompt(self, prompt_id: str) -> PromptOutput: + async def get(self, datasource_id: str) -> AppModelsResponseDatasource: """ - Get a specific prompt + Get a specific datasource Parameters: - - prompt_id: str. + - datasource_id: str. """ _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/prompts/{prompt_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{datasource_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -216,24 +207,24 @@ async def get_prompt(self, prompt_id: str) -> PromptOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def patch_prompt(self, prompt_id: str, *, request: typing.Dict[str, typing.Any]) -> PromptOutput: + async def update(self, datasource_id: str, *, request: AppModelsRequestDatasource) -> AppModelsResponseDatasource: """ - Patch a specific prompt + Update a specific datasource Parameters: - - prompt_id: str. + - datasource_id: str. - - request: typing.Dict[str, typing.Any]. + - request: AppModelsRequestDatasource. 
""" _response = await self._client_wrapper.httpx_client.request( "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/prompts/{prompt_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{datasource_id}"), json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseDatasource, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -242,21 +233,21 @@ async def patch_prompt(self, prompt_id: str, *, request: typing.Dict[str, typing raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def delete_prompt(self, prompt_id: str) -> PromptOutput: + async def delete(self, datasource_id: str) -> typing.Any: """ - Delete a specific prompt + Delete a specific datasource Parameters: - - prompt_id: str. + - datasource_id: str. """ _response = await self._client_wrapper.httpx_client.request( "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/prompts/{prompt_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/datasources/{datasource_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(PromptOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: diff --git a/src/superagent/resources/documents/client.py b/src/superagent/resources/documents/client.py deleted file mode 100644 index 7e287b0..0000000 --- a/src/superagent/resources/documents/client.py +++ /dev/null @@ -1,352 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -import urllib.parse -from json.decoder import JSONDecodeError - -import pydantic - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.document_list_output import DocumentListOutput -from ...types.document_ouput import DocumentOuput -from ...types.http_validation_error import HttpValidationError - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) 
- - -class DocumentsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list_documents(self) -> DocumentListOutput: - """ - List all documents - """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/documents"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentListOutput, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def create_document( - self, - *, - type: str, - url: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - content: typing.Optional[str] = OMIT, - name: str, - authorization: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - from_page: typing.Optional[int] = OMIT, - to_page: typing.Optional[int] = OMIT, - splitter: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - ) -> DocumentOuput: - """ - Create a new document - - Parameters: - - type: str. - - - url: typing.Optional[str]. - - - description: typing.Optional[str]. - - - content: typing.Optional[str]. - - - name: str. - - - authorization: typing.Optional[typing.Dict[str, typing.Any]]. - - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. - - - from_page: typing.Optional[int]. - - - to_page: typing.Optional[int]. - - - splitter: typing.Optional[typing.Dict[str, typing.Any]]. - """ - _request: typing.Dict[str, typing.Any] = {"type": type, "name": name} - if url is not OMIT: - _request["url"] = url - if description is not OMIT: - _request["description"] = description - if content is not OMIT: - _request["content"] = content - if authorization is not OMIT: - _request["authorization"] = authorization - if metadata is not OMIT: - _request["metadata"] = metadata - if from_page is not OMIT: - _request["from_page"] = from_page - if to_page is not OMIT: - _request["to_page"] = to_page - if splitter is not OMIT: - _request["splitter"] = splitter - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/documents"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def get_document(self, document_id: str) -> DocumentOuput: - """ - Get a specific document - - Parameters: - - document_id: str. 
- """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/documents/{document_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def patch_document(self, document_id: str, *, request: typing.Dict[str, typing.Any]) -> DocumentOuput: - """ - Patch a specific document - - Parameters: - - document_id: str. - - - request: typing.Dict[str, typing.Any]. - """ - _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/documents/{document_id}"), - json=jsonable_encoder(request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def delete_document(self, document_id: str) -> DocumentOuput: - """ - Delete a specific document - - Parameters: - - document_id: str. 
- """ - _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/documents/{document_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - -class AsyncDocumentsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list_documents(self) -> DocumentListOutput: - """ - List all documents - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/documents"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentListOutput, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def create_document( - self, - *, - type: str, - url: typing.Optional[str] = OMIT, - description: typing.Optional[str] = OMIT, - content: typing.Optional[str] = OMIT, - name: str, - authorization: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - from_page: typing.Optional[int] = OMIT, - to_page: typing.Optional[int] = OMIT, - splitter: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - ) -> DocumentOuput: - """ - Create a new document - - Parameters: - - type: str. - - - url: typing.Optional[str]. - - - description: typing.Optional[str]. - - - content: typing.Optional[str]. - - - name: str. - - - authorization: typing.Optional[typing.Dict[str, typing.Any]]. - - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. - - - from_page: typing.Optional[int]. - - - to_page: typing.Optional[int]. - - - splitter: typing.Optional[typing.Dict[str, typing.Any]]. 
- """ - _request: typing.Dict[str, typing.Any] = {"type": type, "name": name} - if url is not OMIT: - _request["url"] = url - if description is not OMIT: - _request["description"] = description - if content is not OMIT: - _request["content"] = content - if authorization is not OMIT: - _request["authorization"] = authorization - if metadata is not OMIT: - _request["metadata"] = metadata - if from_page is not OMIT: - _request["from_page"] = from_page - if to_page is not OMIT: - _request["to_page"] = to_page - if splitter is not OMIT: - _request["splitter"] = splitter - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/documents"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def get_document(self, document_id: str) -> DocumentOuput: - """ - Get a specific document - - Parameters: - - document_id: str. - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/documents/{document_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def patch_document(self, document_id: str, *, request: typing.Dict[str, typing.Any]) -> DocumentOuput: - """ - Patch a specific document - - Parameters: - - document_id: str. - - - request: typing.Dict[str, typing.Any]. - """ - _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/documents/{document_id}"), - json=jsonable_encoder(request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def delete_document(self, document_id: str) -> DocumentOuput: - """ - Delete a specific document - - Parameters: - - document_id: str. 
- """ - _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/documents/{document_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(DocumentOuput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/superagent/resources/api_token/__init__.py b/src/superagent/resources/llm/__init__.py similarity index 100% rename from src/superagent/resources/api_token/__init__.py rename to src/superagent/resources/llm/__init__.py diff --git a/src/superagent/resources/api_token/client.py b/src/superagent/resources/llm/client.py similarity index 72% rename from src/superagent/resources/api_token/client.py rename to src/superagent/resources/llm/client.py index a6f0eac..9f9fd18 100644 --- a/src/superagent/resources/api_token/client.py +++ b/src/superagent/resources/llm/client.py @@ -10,52 +10,53 @@ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.api_token_list_output import ApiTokenListOutput -from ...types.api_token_output import ApiTokenOutput +from ...types.app_models_request_llm import AppModelsRequestLlm +from ...types.app_models_response_llm import AppModelsResponseLlm from ...types.http_validation_error import HttpValidationError +from ...types.llm_list import LlmList # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) -class ApiTokenClient: +class LlmClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def list_api_tokens(self) -> ApiTokenListOutput: + def list(self) -> LlmList: """ - List all API tokens + List all LLMs """ _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-tokens"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(LlmList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def create_api_token(self, *, description: str) -> ApiTokenOutput: + def create(self, *, request: AppModelsRequestLlm) -> AppModelsResponseLlm: """ - Create a new API token + Create a new LLM Parameters: - - description: str. + - request: AppModelsRequestLlm. 
""" _response = self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-tokens"), - json=jsonable_encoder({"description": description}), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -64,21 +65,21 @@ def create_api_token(self, *, description: str) -> ApiTokenOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get_api_token(self, token_id: str) -> ApiTokenOutput: + def get(self, llm_id: str) -> AppModelsResponseLlm: """ - Get a specific API token + Get a single LLM Parameters: - - token_id: str. + - llm_id: str. """ _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-tokens/{token_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{llm_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -87,21 +88,24 @@ def get_api_token(self, token_id: str) -> ApiTokenOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def delete_api_token(self, token_id: str) -> ApiTokenOutput: + def update(self, llm_id: str, *, request: AppModelsRequestLlm) -> AppModelsResponseLlm: """ - Delete a specific API token + Patch an LLM Parameters: - - token_id: str. + - llm_id: str. + + - request: AppModelsRequestLlm. 
""" _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-tokens/{token_id}"), + "PATCH", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{llm_id}"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -111,44 +115,44 @@ def delete_api_token(self, token_id: str) -> ApiTokenOutput: raise ApiError(status_code=_response.status_code, body=_response_json) -class AsyncApiTokenClient: +class AsyncLlmClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper - async def list_api_tokens(self) -> ApiTokenListOutput: + async def list(self) -> LlmList: """ - List all API tokens + List all LLMs """ _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-tokens"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(LlmList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def create_api_token(self, *, description: str) -> ApiTokenOutput: + async def create(self, *, request: AppModelsRequestLlm) -> AppModelsResponseLlm: """ - Create a new API token + Create a new LLM Parameters: - - description: str. + - request: AppModelsRequestLlm. """ _response = await self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/api-tokens"), - json=jsonable_encoder({"description": description}), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/llms"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -157,21 +161,21 @@ async def create_api_token(self, *, description: str) -> ApiTokenOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def get_api_token(self, token_id: str) -> ApiTokenOutput: + async def get(self, llm_id: str) -> AppModelsResponseLlm: """ - Get a specific API token + Get a single LLM Parameters: - - token_id: str. + - llm_id: str. 
""" _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-tokens/{token_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{llm_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -180,21 +184,24 @@ async def get_api_token(self, token_id: str) -> ApiTokenOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def delete_api_token(self, token_id: str) -> ApiTokenOutput: + async def update(self, llm_id: str, *, request: AppModelsRequestLlm) -> AppModelsResponseLlm: """ - Delete a specific API token + Patch an LLM Parameters: - - token_id: str. + - llm_id: str. + + - request: AppModelsRequestLlm. """ _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/api-tokens/{token_id}"), + "PATCH", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/llms/{llm_id}"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ApiTokenOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseLlm, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: diff --git a/src/superagent/resources/prompts/__init__.py b/src/superagent/resources/prompts/__init__.py deleted file mode 100644 index f3ea265..0000000 --- a/src/superagent/resources/prompts/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/src/superagent/resources/tags/__init__.py b/src/superagent/resources/tags/__init__.py deleted file mode 100644 index f3ea265..0000000 --- a/src/superagent/resources/tags/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- diff --git a/src/superagent/resources/auth/__init__.py b/src/superagent/resources/tool/__init__.py similarity index 100% rename from src/superagent/resources/auth/__init__.py rename to src/superagent/resources/tool/__init__.py diff --git a/src/superagent/resources/tags/client.py b/src/superagent/resources/tool/client.py similarity index 73% rename from src/superagent/resources/tags/client.py rename to src/superagent/resources/tool/client.py index e2157b6..7df5a88 100644 --- a/src/superagent/resources/tags/client.py +++ b/src/superagent/resources/tool/client.py @@ -10,54 +10,53 @@ from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper from ...core.jsonable_encoder import jsonable_encoder from ...errors.unprocessable_entity_error import UnprocessableEntityError +from ...types.app_models_request_tool import AppModelsRequestTool +from ...types.app_models_response_tool import AppModelsResponseTool from ...types.http_validation_error import HttpValidationError -from ...types.tag_list_output import TagListOutput -from ...types.tag_output import TagOutput +from ...types.tool_list import ToolList # this is used as the default value for optional parameters OMIT = typing.cast(typing.Any, ...) -class TagsClient: +class ToolClient: def __init__(self, *, client_wrapper: SyncClientWrapper): self._client_wrapper = client_wrapper - def list_tags(self) -> TagListOutput: + def list(self) -> ToolList: """ - List all tags + List all tools """ _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tags"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(ToolList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def create_a_tag(self, *, name: str, color: str) -> TagOutput: + def create(self, *, request: AppModelsRequestTool) -> AppModelsResponseTool: """ - Create a new tag + Create a new tool Parameters: - - name: str. - - - color: str. + - request: AppModelsRequestTool. """ _response = self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tags"), - json=jsonable_encoder({"name": name, "color": color}), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -66,21 +65,21 @@ def create_a_tag(self, *, name: str, color: str) -> TagOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def get_tag(self, tag_id: str) -> TagOutput: + def get(self, tool_id: str) -> AppModelsResponseTool: """ - Get a specific tag + Get a specific tool Parameters: - - tag_id: str. 
+ - tool_id: str. """ _response = self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tags/{tag_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -89,24 +88,24 @@ def get_tag(self, tag_id: str) -> TagOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def patch_tag(self, tag_id: str, *, request: typing.Dict[str, typing.Any]) -> TagOutput: + def update(self, tool_id: str, *, request: AppModelsRequestTool) -> AppModelsResponseTool: """ - Patch a specific tag + Update a specific tool Parameters: - - tag_id: str. + - tool_id: str. - - request: typing.Dict[str, typing.Any]. + - request: AppModelsRequestTool. """ _response = self._client_wrapper.httpx_client.request( "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tags/{tag_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -115,21 +114,21 @@ def patch_tag(self, tag_id: str, *, request: typing.Dict[str, typing.Any]) -> Ta raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - def delete_tag(self, tag_id: str) -> TagOutput: + def delete(self, tool_id: str) -> typing.Any: """ - Delete a specific tag + Delete a specific tool Parameters: - - tag_id: str. + - tool_id: str. 
""" _response = self._client_wrapper.httpx_client.request( "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tags/{tag_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -139,46 +138,44 @@ def delete_tag(self, tag_id: str) -> TagOutput: raise ApiError(status_code=_response.status_code, body=_response_json) -class AsyncTagsClient: +class AsyncToolClient: def __init__(self, *, client_wrapper: AsyncClientWrapper): self._client_wrapper = client_wrapper - async def list_tags(self) -> TagListOutput: + async def list(self) -> ToolList: """ - List all tags + List all tools """ _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tags"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagListOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(ToolList, _response.json()) # type: ignore try: _response_json = _response.json() except JSONDecodeError: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def create_a_tag(self, *, name: str, color: str) -> TagOutput: + async def create(self, *, request: AppModelsRequestTool) -> AppModelsResponseTool: """ - Create a new tag + Create a new tool Parameters: - - name: str. - - - color: str. + - request: AppModelsRequestTool. """ _response = await self._client_wrapper.httpx_client.request( "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tags"), - json=jsonable_encoder({"name": name, "color": color}), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), + json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -187,21 +184,21 @@ async def create_a_tag(self, *, name: str, color: str) -> TagOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def get_tag(self, tag_id: str) -> TagOutput: + async def get(self, tool_id: str) -> AppModelsResponseTool: """ - Get a specific tag + Get a specific tool Parameters: - - tag_id: str. + - tool_id: str. 
""" _response = await self._client_wrapper.httpx_client.request( "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tags/{tag_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -210,24 +207,24 @@ async def get_tag(self, tag_id: str) -> TagOutput: raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def patch_tag(self, tag_id: str, *, request: typing.Dict[str, typing.Any]) -> TagOutput: + async def update(self, tool_id: str, *, request: AppModelsRequestTool) -> AppModelsResponseTool: """ - Patch a specific tag + Update a specific tool Parameters: - - tag_id: str. + - tool_id: str. - - request: typing.Dict[str, typing.Any]. + - request: AppModelsRequestTool. """ _response = await self._client_wrapper.httpx_client.request( "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tags/{tag_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), json=jsonable_encoder(request), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(AppModelsResponseTool, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: @@ -236,21 +233,21 @@ async def patch_tag(self, tag_id: str, *, request: typing.Dict[str, typing.Any]) raise ApiError(status_code=_response.status_code, body=_response.text) raise ApiError(status_code=_response.status_code, body=_response_json) - async def delete_tag(self, tag_id: str) -> TagOutput: + async def delete(self, tool_id: str) -> typing.Any: """ - Delete a specific tag + Delete a specific tool Parameters: - - tag_id: str. + - tool_id: str. """ _response = await self._client_wrapper.httpx_client.request( "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tags/{tag_id}"), + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), headers=self._client_wrapper.get_headers(), timeout=60, ) if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(TagOutput, _response.json()) # type: ignore + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore if _response.status_code == 422: raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore try: diff --git a/src/superagent/resources/tools/__init__.py b/src/superagent/resources/tools/__init__.py deleted file mode 100644 index f3ea265..0000000 --- a/src/superagent/resources/tools/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- diff --git a/src/superagent/resources/tools/client.py b/src/superagent/resources/tools/client.py deleted file mode 100644 index 15edf7e..0000000 --- a/src/superagent/resources/tools/client.py +++ /dev/null @@ -1,308 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -import urllib.parse -from json.decoder import JSONDecodeError - -import pydantic - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper -from ...core.jsonable_encoder import jsonable_encoder -from ...errors.unprocessable_entity_error import UnprocessableEntityError -from ...types.http_validation_error import HttpValidationError -from ...types.tool_list_output import ToolListOutput -from ...types.tool_output import ToolOutput - -# this is used as the default value for optional parameters -OMIT = typing.cast(typing.Any, ...) - - -class ToolsClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list_tools(self) -> ToolListOutput: - """ - List all tools - """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolListOutput, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def create_a_tool( - self, - *, - name: str, - type: str, - description: str, - return_direct: typing.Optional[bool] = OMIT, - authorization: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - ) -> ToolOutput: - """ - Create a new tool - - Parameters: - - name: str. - - - type: str. - - - description: str. - - - return_direct: typing.Optional[bool]. - - - authorization: typing.Optional[typing.Dict[str, typing.Any]]. - - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. - """ - _request: typing.Dict[str, typing.Any] = {"name": name, "type": type, "description": description} - if return_direct is not OMIT: - _request["returnDirect"] = return_direct - if authorization is not OMIT: - _request["authorization"] = authorization - if metadata is not OMIT: - _request["metadata"] = metadata - _response = self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def get_tool(self, tool_id: str) -> ToolOutput: - """ - Get a specific tool - - Parameters: - - tool_id: str. 
- """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def patch_tool(self, tool_id: str, *, request: typing.Dict[str, typing.Any]) -> ToolOutput: - """ - Patch a specific tool - - Parameters: - - tool_id: str. - - - request: typing.Dict[str, typing.Any]. - """ - _response = self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), - json=jsonable_encoder(request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - def delete_tool(self, tool_id: str) -> ToolOutput: - """ - Delete a specific tool - - Parameters: - - tool_id: str. - """ - _response = self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - -class AsyncToolsClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list_tools(self) -> ToolListOutput: - """ - List all tools - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolListOutput, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def create_a_tool( - self, - *, - name: str, - type: str, - description: str, - return_direct: typing.Optional[bool] = OMIT, - authorization: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - metadata: typing.Optional[typing.Dict[str, typing.Any]] = OMIT, - ) -> ToolOutput: - """ - Create 
a new tool - - Parameters: - - name: str. - - - type: str. - - - description: str. - - - return_direct: typing.Optional[bool]. - - - authorization: typing.Optional[typing.Dict[str, typing.Any]]. - - - metadata: typing.Optional[typing.Dict[str, typing.Any]]. - """ - _request: typing.Dict[str, typing.Any] = {"name": name, "type": type, "description": description} - if return_direct is not OMIT: - _request["returnDirect"] = return_direct - if authorization is not OMIT: - _request["authorization"] = authorization - if metadata is not OMIT: - _request["metadata"] = metadata - _response = await self._client_wrapper.httpx_client.request( - "POST", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/tools"), - json=jsonable_encoder(_request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def get_tool(self, tool_id: str) -> ToolOutput: - """ - Get a specific tool - - Parameters: - - tool_id: str. - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def patch_tool(self, tool_id: str, *, request: typing.Dict[str, typing.Any]) -> ToolOutput: - """ - Patch a specific tool - - Parameters: - - tool_id: str. - - - request: typing.Dict[str, typing.Any]. - """ - _response = await self._client_wrapper.httpx_client.request( - "PATCH", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), - json=jsonable_encoder(request), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - async def delete_tool(self, tool_id: str) -> ToolOutput: - """ - Delete a specific tool - - Parameters: - - tool_id: str. 
- """ - _response = await self._client_wrapper.httpx_client.request( - "DELETE", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/tools/{tool_id}"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(ToolOutput, _response.json()) # type: ignore - if _response.status_code == 422: - raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/superagent/resources/traces/__init__.py b/src/superagent/resources/traces/__init__.py deleted file mode 100644 index f3ea265..0000000 --- a/src/superagent/resources/traces/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - diff --git a/src/superagent/resources/traces/client.py b/src/superagent/resources/traces/client.py deleted file mode 100644 index 0c92d51..0000000 --- a/src/superagent/resources/traces/client.py +++ /dev/null @@ -1,56 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import typing -import urllib.parse -from json.decoder import JSONDecodeError - -import pydantic - -from ...core.api_error import ApiError -from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper - - -class TracesClient: - def __init__(self, *, client_wrapper: SyncClientWrapper): - self._client_wrapper = client_wrapper - - def list_agent_traces(self) -> typing.Any: - """ - List all agent traces - """ - _response = self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/traces"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) - - -class AsyncTracesClient: - def __init__(self, *, client_wrapper: AsyncClientWrapper): - self._client_wrapper = client_wrapper - - async def list_agent_traces(self) -> typing.Any: - """ - List all agent traces - """ - _response = await self._client_wrapper.httpx_client.request( - "GET", - urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/traces"), - headers=self._client_wrapper.get_headers(), - timeout=60, - ) - if 200 <= _response.status_code < 300: - return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore - try: - _response_json = _response.json() - except JSONDecodeError: - raise ApiError(status_code=_response.status_code, body=_response.text) - raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/superagent/resources/user/__init__.py b/src/superagent/resources/user/__init__.py deleted file mode 100644 index f3ea265..0000000 --- a/src/superagent/resources/user/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- diff --git a/src/superagent/resources/documents/__init__.py b/src/superagent/resources/workflow/__init__.py similarity index 100% rename from src/superagent/resources/documents/__init__.py rename to src/superagent/resources/workflow/__init__.py diff --git a/src/superagent/resources/workflow/client.py b/src/superagent/resources/workflow/client.py new file mode 100644 index 0000000..6ce9e63 --- /dev/null +++ b/src/superagent/resources/workflow/client.py @@ -0,0 +1,481 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing +import urllib.parse +from json.decoder import JSONDecodeError + +import pydantic + +from ...core.api_error import ApiError +from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from ...core.jsonable_encoder import jsonable_encoder +from ...errors.unprocessable_entity_error import UnprocessableEntityError +from ...types.app_models_request_workflow import AppModelsRequestWorkflow +from ...types.app_models_response_workflow import AppModelsResponseWorkflow +from ...types.http_validation_error import HttpValidationError +from ...types.workflow_list import WorkflowList + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class WorkflowClient: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + def list(self) -> WorkflowList: + """ + List all workflows + """ + _response = self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(WorkflowList, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def create(self, *, request: AppModelsRequestWorkflow) -> AppModelsResponseWorkflow: + """ + Create a new workflow + + Parameters: + - request: AppModelsRequestWorkflow. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def get(self, workflow_id: str) -> AppModelsResponseWorkflow: + """ + Get a single workflow + + Parameters: + - workflow_id: str. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def update(self, workflow_id: str, *, request: AppModelsRequestWorkflow) -> AppModelsResponseWorkflow: + """ + Patch a workflow + + Parameters: + - workflow_id: str. + + - request: AppModelsRequestWorkflow. + """ + _response = self._client_wrapper.httpx_client.request( + "PATCH", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete(self, workflow_id: str) -> typing.Any: + """ + Delete a specific workflow + + Parameters: + - workflow_id: str. + """ + _response = self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def invoke(self, workflow_id: str, *, input: str, enable_streaming: bool) -> typing.Any: + """ + Invoke a specific workflow + + Parameters: + - workflow_id: str. + + - input: str. + + - enable_streaming: bool. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/invoke"), + json=jsonable_encoder({"input": input, "enableStreaming": enable_streaming}), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def list_steps(self, workflow_id: str) -> WorkflowList: + """ + List all steps of a workflow + + Parameters: + - workflow_id: str. + """ + _response = self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/steps"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(WorkflowList, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def add_step( + self, workflow_id: str, *, order: int, agent_id: str, input: str, output: str + ) -> AppModelsResponseWorkflow: + """ + Create a new workflow step + + Parameters: + - workflow_id: str. + + - order: int. + + - agent_id: str. + + - input: str. + + - output: str. + """ + _response = self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/steps"), + json=jsonable_encoder({"order": order, "agentId": agent_id, "input": input, "output": output}), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + def delete_step(self, workflow_id: str, step_id: str) -> typing.Any: + """ + Delete a specific workflow step + + Parameters: + - workflow_id: str. + + - step_id: str. 
+ """ + _response = self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/steps/{step_id}" + ), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + +class AsyncWorkflowClient: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + async def list(self) -> WorkflowList: + """ + List all workflows + """ + _response = await self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(WorkflowList, _response.json()) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def create(self, *, request: AppModelsRequestWorkflow) -> AppModelsResponseWorkflow: + """ + Create a new workflow + + Parameters: + - request: AppModelsRequestWorkflow. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", "api/v1/workflows"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def get(self, workflow_id: str) -> AppModelsResponseWorkflow: + """ + Get a single workflow + + Parameters: + - workflow_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def update(self, workflow_id: str, *, request: AppModelsRequestWorkflow) -> AppModelsResponseWorkflow: + """ + Patch a workflow + + Parameters: + - workflow_id: str. + + - request: AppModelsRequestWorkflow. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "PATCH", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}"), + json=jsonable_encoder(request), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete(self, workflow_id: str) -> typing.Any: + """ + Delete a specific workflow + + Parameters: + - workflow_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def invoke(self, workflow_id: str, *, input: str, enable_streaming: bool) -> typing.Any: + """ + Invoke a specific workflow + + Parameters: + - workflow_id: str. + + - input: str. + + - enable_streaming: bool. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/invoke"), + json=jsonable_encoder({"input": input, "enableStreaming": enable_streaming}), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def list_steps(self, workflow_id: str) -> WorkflowList: + """ + List all steps of a workflow + + Parameters: + - workflow_id: str. 
+ """ + _response = await self._client_wrapper.httpx_client.request( + "GET", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/steps"), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(WorkflowList, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def add_step( + self, workflow_id: str, *, order: int, agent_id: str, input: str, output: str + ) -> AppModelsResponseWorkflow: + """ + Create a new workflow step + + Parameters: + - workflow_id: str. + + - order: int. + + - agent_id: str. + + - input: str. + + - output: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "POST", + urllib.parse.urljoin(f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/steps"), + json=jsonable_encoder({"order": order, "agentId": agent_id, "input": input, "output": output}), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(AppModelsResponseWorkflow, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) + + async def delete_step(self, workflow_id: str, step_id: str) -> typing.Any: + """ + Delete a specific workflow step + + Parameters: + - workflow_id: str. + + - step_id: str. + """ + _response = await self._client_wrapper.httpx_client.request( + "DELETE", + urllib.parse.urljoin( + f"{self._client_wrapper.get_base_url()}/", f"api/v1/workflows/{workflow_id}/steps/{step_id}" + ), + headers=self._client_wrapper.get_headers(), + timeout=60, + ) + if 200 <= _response.status_code < 300: + return pydantic.parse_obj_as(typing.Any, _response.json()) # type: ignore + if _response.status_code == 422: + raise UnprocessableEntityError(pydantic.parse_obj_as(HttpValidationError, _response.json())) # type: ignore + try: + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, body=_response.text) + raise ApiError(status_code=_response.status_code, body=_response_json) diff --git a/src/superagent/types/__init__.py b/src/superagent/types/__init__.py index 236a23f..5cd12b1 100644 --- a/src/superagent/types/__init__.py +++ b/src/superagent/types/__init__.py @@ -1,73 +1,51 @@ # This file was auto-generated by Fern from our API Definition. 
-from .agent_document_list_ouput import AgentDocumentListOuput -from .agent_document_output import AgentDocumentOutput -from .agent_list_output import AgentListOutput -from .agent_output import AgentOutput -from .agent_tool_list_output import AgentToolListOutput -from .agent_tool_output import AgentToolOutput -from .agent_type import AgentType -from .api_token_list_output import ApiTokenListOutput -from .api_token_output import ApiTokenOutput -from .app_lib_models_response_agent import AppLibModelsResponseAgent -from .app_lib_models_response_agent_document import AppLibModelsResponseAgentDocument -from .app_lib_models_response_agent_tool import AppLibModelsResponseAgentTool -from .app_lib_models_response_api_token import AppLibModelsResponseApiToken -from .app_lib_models_response_document import AppLibModelsResponseDocument -from .app_lib_models_response_prompt import AppLibModelsResponsePrompt -from .app_lib_models_response_tag import AppLibModelsResponseTag -from .app_lib_models_response_tool import AppLibModelsResponseTool -from .document_list_output import DocumentListOutput -from .document_ouput import DocumentOuput -from .document_type import DocumentType +from .agent_datasosurce_list import AgentDatasosurceList +from .agent_list import AgentList +from .agent_run_list import AgentRunList +from .agent_tool_list import AgentToolList +from .api_user import ApiUser +from .app_models_request_agent import AppModelsRequestAgent +from .app_models_request_datasource import AppModelsRequestDatasource +from .app_models_request_llm import AppModelsRequestLlm +from .app_models_request_tool import AppModelsRequestTool +from .app_models_request_workflow import AppModelsRequestWorkflow +from .app_models_response_agent import AppModelsResponseAgent +from .app_models_response_agent_invoke import AppModelsResponseAgentInvoke +from .app_models_response_datasource import AppModelsResponseDatasource +from .app_models_response_llm import AppModelsResponseLlm +from .app_models_response_tool import AppModelsResponseTool +from .app_models_response_workflow import AppModelsResponseWorkflow +from .datasource_list import DatasourceList from .http_validation_error import HttpValidationError -from .predict_agent_output import PredictAgentOutput -from .prompt_list_output import PromptListOutput -from .prompt_output import PromptOutput -from .sign_in_output import SignInOutput -from .tag_list_output import TagListOutput -from .tag_output import TagOutput -from .tool_list_output import ToolListOutput -from .tool_output import ToolOutput -from .tool_type import ToolType -from .user import User -from .user_output import UserOutput +from .llm_list import LlmList +from .tool_list import ToolList from .validation_error import ValidationError from .validation_error_loc_item import ValidationErrorLocItem +from .workflow_list import WorkflowList __all__ = [ - "AgentDocumentListOuput", - "AgentDocumentOutput", - "AgentListOutput", - "AgentOutput", - "AgentToolListOutput", - "AgentToolOutput", - "AgentType", - "ApiTokenListOutput", - "ApiTokenOutput", - "AppLibModelsResponseAgent", - "AppLibModelsResponseAgentDocument", - "AppLibModelsResponseAgentTool", - "AppLibModelsResponseApiToken", - "AppLibModelsResponseDocument", - "AppLibModelsResponsePrompt", - "AppLibModelsResponseTag", - "AppLibModelsResponseTool", - "DocumentListOutput", - "DocumentOuput", - "DocumentType", + "AgentDatasosurceList", + "AgentList", + "AgentRunList", + "AgentToolList", + "ApiUser", + "AppModelsRequestAgent", + "AppModelsRequestDatasource", + 
"AppModelsRequestLlm", + "AppModelsRequestTool", + "AppModelsRequestWorkflow", + "AppModelsResponseAgent", + "AppModelsResponseAgentInvoke", + "AppModelsResponseDatasource", + "AppModelsResponseLlm", + "AppModelsResponseTool", + "AppModelsResponseWorkflow", + "DatasourceList", "HttpValidationError", - "PredictAgentOutput", - "PromptListOutput", - "PromptOutput", - "SignInOutput", - "TagListOutput", - "TagOutput", - "ToolListOutput", - "ToolOutput", - "ToolType", - "User", - "UserOutput", + "LlmList", + "ToolList", "ValidationError", "ValidationErrorLocItem", + "WorkflowList", ] diff --git a/src/superagent/types/tag_output.py b/src/superagent/types/agent_datasosurce_list.py similarity index 82% rename from src/superagent/types/tag_output.py rename to src/superagent/types/agent_datasosurce_list.py index 8bde344..9d2e146 100644 --- a/src/superagent/types/tag_output.py +++ b/src/superagent/types/agent_datasosurce_list.py @@ -6,12 +6,11 @@ import pydantic from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_tag import AppLibModelsResponseTag -class TagOutput(pydantic.BaseModel): +class AgentDatasosurceList(pydantic.BaseModel): success: bool - data: typing.Optional[AppLibModelsResponseTag] + data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} diff --git a/src/superagent/types/agent_document_list_ouput.py b/src/superagent/types/agent_document_list_ouput.py deleted file mode 100644 index e74a50e..0000000 --- a/src/superagent/types/agent_document_list_ouput.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_agent_document import AppLibModelsResponseAgentDocument - - -class AgentDocumentListOuput(pydantic.BaseModel): - success: bool - data: typing.List[AppLibModelsResponseAgentDocument] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_list.py b/src/superagent/types/agent_list.py new file mode 100644 index 0000000..816d0ee --- /dev/null +++ b/src/superagent/types/agent_list.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing + +import pydantic + +from ..core.datetime_utils import serialize_datetime + + +class AgentList(pydantic.BaseModel): + success: bool + data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().dict(**kwargs_with_defaults) + + class Config: + frozen = True + smart_union = True + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_list_output.py b/src/superagent/types/agent_list_output.py deleted file mode 100644 index b10f270..0000000 --- a/src/superagent/types/agent_list_output.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_agent import AppLibModelsResponseAgent - - -class AgentListOutput(pydantic.BaseModel): - success: bool - data: typing.List[AppLibModelsResponseAgent] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_output.py b/src/superagent/types/agent_output.py deleted file mode 100644 index 89ced32..0000000 --- a/src/superagent/types/agent_output.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_agent import AppLibModelsResponseAgent - - -class AgentOutput(pydantic.BaseModel): - success: bool - data: typing.Optional[AppLibModelsResponseAgent] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_run_list.py b/src/superagent/types/agent_run_list.py new file mode 100644 index 0000000..d1e675b --- /dev/null +++ b/src/superagent/types/agent_run_list.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +import datetime as dt +import typing + +import pydantic + +from ..core.datetime_utils import serialize_datetime + + +class AgentRunList(pydantic.BaseModel): + success: bool + data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().dict(**kwargs_with_defaults) + + class Config: + frozen = True + smart_union = True + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_tool_list.py b/src/superagent/types/agent_tool_list.py new file mode 100644 index 0000000..e623b93 --- /dev/null +++ b/src/superagent/types/agent_tool_list.py @@ -0,0 +1,26 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing + +import pydantic + +from ..core.datetime_utils import serialize_datetime + + +class AgentToolList(pydantic.BaseModel): + success: bool + data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] + + def json(self, **kwargs: typing.Any) -> str: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: + kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} + return super().dict(**kwargs_with_defaults) + + class Config: + frozen = True + smart_union = True + json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_tool_list_output.py b/src/superagent/types/agent_tool_list_output.py deleted file mode 100644 index ad3716c..0000000 --- a/src/superagent/types/agent_tool_list_output.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_agent_tool import AppLibModelsResponseAgentTool - - -class AgentToolListOutput(pydantic.BaseModel): - success: bool - data: typing.List[AppLibModelsResponseAgentTool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_tool_output.py b/src/superagent/types/agent_tool_output.py deleted file mode 100644 index 4c5f13a..0000000 --- a/src/superagent/types/agent_tool_output.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
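The replacement list types added above (AgentList, AgentRunList, AgentToolList) all share the same two-field shape: a `success` flag plus an optional list of plain dicts, with a frozen pydantic config and by_alias serialization. A small sketch of working with one of them, using only what those definitions show; the dict contents are illustrative since `data` is untyped beyond `Dict[str, Any]`.

```python
# Working with the new list wrappers.
import typing

import pydantic

from superagent import AgentToolList  # AgentList and AgentRunList share the same shape

payload = {"success": True, "data": [{"id": "tool_123", "name": "browser"}]}
tools = pydantic.parse_obj_as(AgentToolList, payload)

assert tools.success
first: typing.Dict[str, typing.Any] = (tools.data or [])[0]
print(first["name"])

# Config.frozen = True makes instances immutable; build a new one instead of mutating.
# tools.success = False  # would raise a TypeError
print(tools.json())  # json()/dict() default to by_alias=True, exclude_unset=True
```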
- -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_agent_tool import AppLibModelsResponseAgentTool - - -class AgentToolOutput(pydantic.BaseModel): - success: bool - data: typing.Optional[AppLibModelsResponseAgentTool] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/agent_type.py b/src/superagent/types/agent_type.py deleted file mode 100644 index ac68021..0000000 --- a/src/superagent/types/agent_type.py +++ /dev/null @@ -1,29 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import enum -import typing - -T_Result = typing.TypeVar("T_Result") - - -class AgentType(str, enum.Enum): - """ - An enumeration. - """ - - REACT = "REACT" - PLANSOLVE = "PLANSOLVE" - OPENAI = "OPENAI" - - def visit( - self, - react: typing.Callable[[], T_Result], - plansolve: typing.Callable[[], T_Result], - openai: typing.Callable[[], T_Result], - ) -> T_Result: - if self is AgentType.REACT: - return react() - if self is AgentType.PLANSOLVE: - return plansolve() - if self is AgentType.OPENAI: - return openai() diff --git a/src/superagent/types/api_token_list_output.py b/src/superagent/types/api_token_list_output.py deleted file mode 100644 index 5c7da7a..0000000 --- a/src/superagent/types/api_token_list_output.py +++ /dev/null @@ -1,27 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_api_token import AppLibModelsResponseApiToken - - -class ApiTokenListOutput(pydantic.BaseModel): - success: bool - data: typing.List[AppLibModelsResponseApiToken] - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/user_output.py b/src/superagent/types/api_user.py similarity index 88% rename from src/superagent/types/user_output.py rename to src/superagent/types/api_user.py index 929d682..7018148 100644 --- a/src/superagent/types/user_output.py +++ b/src/superagent/types/api_user.py @@ -6,12 +6,11 @@ import pydantic from ..core.datetime_utils import serialize_datetime -from .user import User -class UserOutput(pydantic.BaseModel): +class ApiUser(pydantic.BaseModel): success: bool - data: typing.Optional[User] + data: typing.Optional[typing.Dict[str, typing.Any]] def json(self, **kwargs: typing.Any) -> str: kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} diff --git a/src/superagent/types/app_lib_models_response_agent.py b/src/superagent/types/app_lib_models_response_agent.py deleted file mode 100644 index 8e7e854..0000000 --- a/src/superagent/types/app_lib_models_response_agent.py +++ /dev/null @@ -1,44 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .agent_type import AgentType - - -class AppLibModelsResponseAgent(pydantic.BaseModel): - id: str - description: typing.Optional[str] - avatar_url: typing.Optional[str] = pydantic.Field(alias="avatarUrl") - shareable_token: typing.Optional[str] = pydantic.Field(alias="shareableToken") - user_id: str = pydantic.Field(alias="userId") - document_id: typing.Optional[str] = pydantic.Field(alias="documentId") - tool_id: typing.Optional[str] = pydantic.Field(alias="toolId") - tags: typing.Optional[typing.List[typing.Dict[str, typing.Any]]] - prompt_id: typing.Optional[str] = pydantic.Field(alias="promptId") - prompt: typing.Optional[typing.Dict[str, typing.Any]] - name: str - type: AgentType - llm: typing.Dict[str, typing.Any] - has_memory: bool = pydantic.Field(alias="hasMemory") - is_public: bool = pydantic.Field(alias="isPublic") - is_listed: bool = pydantic.Field(alias="isListed") - created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt") - updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_lib_models_response_agent_document.py b/src/superagent/types/app_lib_models_response_agent_document.py deleted file mode 100644 index 499c549..0000000 --- a/src/superagent/types/app_lib_models_response_agent_document.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
- -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_document import AppLibModelsResponseDocument - - -class AppLibModelsResponseAgentDocument(pydantic.BaseModel): - id: str - document_id: typing.Optional[str] = pydantic.Field(alias="documentId") - document: typing.Optional[AppLibModelsResponseDocument] - agent_id: str = pydantic.Field(alias="agentId") - created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt") - updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt") - deleted_at: typing.Optional[dt.datetime] = pydantic.Field(alias="deletedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_lib_models_response_agent_tool.py b/src/superagent/types/app_lib_models_response_agent_tool.py deleted file mode 100644 index caeb3d2..0000000 --- a/src/superagent/types/app_lib_models_response_agent_tool.py +++ /dev/null @@ -1,33 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. - -import datetime as dt -import typing - -import pydantic - -from ..core.datetime_utils import serialize_datetime -from .app_lib_models_response_tool import AppLibModelsResponseTool - - -class AppLibModelsResponseAgentTool(pydantic.BaseModel): - id: str - tool_id: typing.Optional[str] = pydantic.Field(alias="toolId") - tool: typing.Optional[AppLibModelsResponseTool] - agent_id: str = pydantic.Field(alias="agentId") - created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt") - updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt") - deleted_at: typing.Optional[dt.datetime] = pydantic.Field(alias="deletedAt") - - def json(self, **kwargs: typing.Any) -> str: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().json(**kwargs_with_defaults) - - def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]: - kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs} - return super().dict(**kwargs_with_defaults) - - class Config: - frozen = True - smart_union = True - allow_population_by_field_name = True - json_encoders = {dt.datetime: serialize_datetime} diff --git a/src/superagent/types/app_lib_models_response_document.py b/src/superagent/types/app_lib_models_response_document.py deleted file mode 100644 index dcbe218..0000000 --- a/src/superagent/types/app_lib_models_response_document.py +++ /dev/null @@ -1,40 +0,0 @@ -# This file was auto-generated by Fern from our API Definition. 
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .document_type import DocumentType
-
-
-class AppLibModelsResponseDocument(pydantic.BaseModel):
-    id: str
-    description: typing.Optional[str]
-    user_id: str = pydantic.Field(alias="userId")
-    type: DocumentType
-    url: typing.Optional[str]
-    content: typing.Optional[str]
-    content_hash: typing.Optional[str] = pydantic.Field(alias="contentHash")
-    name: str
-    splitter: typing.Optional[typing.Dict[str, typing.Any]]
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt")
-    index: typing.Optional[typing.Dict[str, typing.Any]]
-    authorization: typing.Optional[typing.Dict[str, typing.Any]]
-    metadata: typing.Optional[typing.Dict[str, typing.Any]]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/app_lib_models_response_prompt.py b/src/superagent/types/app_lib_models_response_prompt.py
deleted file mode 100644
index 6a60eb3..0000000
--- a/src/superagent/types/app_lib_models_response_prompt.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-
-
-class AppLibModelsResponsePrompt(pydantic.BaseModel):
-    id: str
-    name: str
-    template: str
-    input_variables: typing.List[typing.Any]
-    user_id: str = pydantic.Field(alias="userId")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt")
-    deleted_at: typing.Optional[dt.datetime] = pydantic.Field(alias="deletedAt")
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/app_lib_models_response_tool.py b/src/superagent/types/app_lib_models_response_tool.py
deleted file mode 100644
index ef3b996..0000000
--- a/src/superagent/types/app_lib_models_response_tool.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .tool_type import ToolType
-
-
-class AppLibModelsResponseTool(pydantic.BaseModel):
-    id: str
-    name: str
-    description: typing.Optional[str]
-    type: typing.Optional[ToolType]
-    metadata: typing.Optional[typing.Dict[str, typing.Any]]
-    user_id: str = pydantic.Field(alias="userId")
-    return_direct: bool = pydantic.Field(alias="returnDirect")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt")
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/app_lib_models_response_tag.py b/src/superagent/types/app_models_request_agent.py
similarity index 71%
rename from src/superagent/types/app_lib_models_response_tag.py
rename to src/superagent/types/app_models_request_agent.py
index 8db3c7f..ad9babf 100644
--- a/src/superagent/types/app_lib_models_response_tag.py
+++ b/src/superagent/types/app_models_request_agent.py
@@ -8,13 +8,13 @@
 from ..core.datetime_utils import serialize_datetime
 
 
-class AppLibModelsResponseTag(pydantic.BaseModel):
-    id: str
+class AppModelsRequestAgent(pydantic.BaseModel):
+    is_active: typing.Optional[bool] = pydantic.Field(alias="isActive")
     name: str
-    color: typing.Optional[str]
-    user_id: str = pydantic.Field(alias="userId")
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt")
+    prompt: typing.Optional[str]
+    llm_model: str = pydantic.Field(alias="llmModel")
+    description: str
+    avatar: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
diff --git a/src/superagent/types/api_token_output.py b/src/superagent/types/app_models_request_datasource.py
similarity index 78%
rename from src/superagent/types/api_token_output.py
rename to src/superagent/types/app_models_request_datasource.py
index 7e891f2..77790e5 100644
--- a/src/superagent/types/api_token_output.py
+++ b/src/superagent/types/app_models_request_datasource.py
@@ -6,12 +6,14 @@
 import pydantic
 
 from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_api_token import AppLibModelsResponseApiToken
 
 
-class ApiTokenOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.Optional[AppLibModelsResponseApiToken]
+class AppModelsRequestDatasource(pydantic.BaseModel):
+    name: str
+    description: str
+    type: str
+    url: typing.Optional[str]
+    metadata: typing.Optional[typing.Dict[str, typing.Any]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
diff --git a/src/superagent/types/app_lib_models_response_api_token.py b/src/superagent/types/app_models_request_llm.py
similarity index 81%
rename from src/superagent/types/app_lib_models_response_api_token.py
rename to src/superagent/types/app_models_request_llm.py
index 31da63e..d1be410 100644
--- a/src/superagent/types/app_lib_models_response_api_token.py
+++ b/src/superagent/types/app_models_request_llm.py
@@ -8,11 +8,10 @@
 from ..core.datetime_utils import serialize_datetime
 
 
-class AppLibModelsResponseApiToken(pydantic.BaseModel):
-    id: str
-    user_id: str = pydantic.Field(alias="userId")
-    description: str
-    token: str
+class AppModelsRequestLlm(pydantic.BaseModel):
+    provider: str
+    api_key: str = pydantic.Field(alias="apiKey")
+    options: typing.Optional[typing.Dict[str, typing.Any]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
diff --git a/src/superagent/types/agent_document_output.py b/src/superagent/types/app_models_request_tool.py
similarity index 71%
rename from src/superagent/types/agent_document_output.py
rename to src/superagent/types/app_models_request_tool.py
index d5d14be..05252fa 100644
--- a/src/superagent/types/agent_document_output.py
+++ b/src/superagent/types/app_models_request_tool.py
@@ -6,12 +6,14 @@
 import pydantic
 
 from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_agent_document import AppLibModelsResponseAgentDocument
 
 
-class AgentDocumentOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.Optional[AppLibModelsResponseAgentDocument]
+class AppModelsRequestTool(pydantic.BaseModel):
+    name: str
+    description: str
+    type: str
+    metadata: typing.Optional[typing.Dict[str, typing.Any]]
+    return_direct: typing.Optional[bool] = pydantic.Field(alias="returnDirect")
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
@@ -24,4 +26,5 @@ def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
     class Config:
         frozen = True
         smart_union = True
+        allow_population_by_field_name = True
         json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tag_list_output.py b/src/superagent/types/app_models_request_workflow.py
similarity index 80%
rename from src/superagent/types/tag_list_output.py
rename to src/superagent/types/app_models_request_workflow.py
index 0ee3fe5..bf9e47c 100644
--- a/src/superagent/types/tag_list_output.py
+++ b/src/superagent/types/app_models_request_workflow.py
@@ -6,12 +6,11 @@
 import pydantic
 
 from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_tag import AppLibModelsResponseTag
 
 
-class TagListOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.List[AppLibModelsResponseTag]
+class AppModelsRequestWorkflow(pydantic.BaseModel):
+    name: str
+    description: str
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
diff --git a/src/superagent/types/app_models_response_agent.py b/src/superagent/types/app_models_response_agent.py
new file mode 100644
index 0000000..18f2cb8
--- /dev/null
+++ b/src/superagent/types/app_models_response_agent.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class AppModelsResponseAgent(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.Dict[str, typing.Any]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/sign_in_output.py b/src/superagent/types/app_models_response_agent_invoke.py
similarity index 93%
rename from src/superagent/types/sign_in_output.py
rename to src/superagent/types/app_models_response_agent_invoke.py
index c038e4a..5ce34f5 100644
--- a/src/superagent/types/sign_in_output.py
+++ b/src/superagent/types/app_models_response_agent_invoke.py
@@ -8,7 +8,7 @@
 from ..core.datetime_utils import serialize_datetime
 
 
-class SignInOutput(pydantic.BaseModel):
+class AppModelsResponseAgentInvoke(pydantic.BaseModel):
     success: bool
     data: typing.Dict[str, typing.Any]
 
diff --git a/src/superagent/types/app_models_response_datasource.py b/src/superagent/types/app_models_response_datasource.py
new file mode 100644
index 0000000..b990d93
--- /dev/null
+++ b/src/superagent/types/app_models_response_datasource.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class AppModelsResponseDatasource(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.Dict[str, typing.Any]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/predict_agent_output.py b/src/superagent/types/app_models_response_llm.py
similarity index 87%
rename from src/superagent/types/predict_agent_output.py
rename to src/superagent/types/app_models_response_llm.py
index f5f18c3..5644275 100644
--- a/src/superagent/types/predict_agent_output.py
+++ b/src/superagent/types/app_models_response_llm.py
@@ -8,10 +8,9 @@
 from ..core.datetime_utils import serialize_datetime
 
 
-class PredictAgentOutput(pydantic.BaseModel):
+class AppModelsResponseLlm(pydantic.BaseModel):
     success: bool
-    data: str
-    trace: typing.Dict[str, typing.Any]
+    data: typing.Optional[typing.Dict[str, typing.Any]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
diff --git a/src/superagent/types/app_models_response_tool.py b/src/superagent/types/app_models_response_tool.py
new file mode 100644
index 0000000..6e9ae7c
--- /dev/null
+++ b/src/superagent/types/app_models_response_tool.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class AppModelsResponseTool(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.Dict[str, typing.Any]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/app_models_response_workflow.py b/src/superagent/types/app_models_response_workflow.py
new file mode 100644
index 0000000..7dabc01
--- /dev/null
+++ b/src/superagent/types/app_models_response_workflow.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class AppModelsResponseWorkflow(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.Dict[str, typing.Any]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/datasource_list.py b/src/superagent/types/datasource_list.py
new file mode 100644
index 0000000..e3da99d
--- /dev/null
+++ b/src/superagent/types/datasource_list.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class DatasourceList(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/document_list_output.py b/src/superagent/types/document_list_output.py
deleted file mode 100644
index 297f9a9..0000000
--- a/src/superagent/types/document_list_output.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_document import AppLibModelsResponseDocument
-
-
-class DocumentListOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.List[AppLibModelsResponseDocument]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/document_ouput.py b/src/superagent/types/document_ouput.py
deleted file mode 100644
index e2e21fc..0000000
--- a/src/superagent/types/document_ouput.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_document import AppLibModelsResponseDocument
-
-
-class DocumentOuput(pydantic.BaseModel):
-    success: bool
-    data: typing.Optional[AppLibModelsResponseDocument]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/document_type.py b/src/superagent/types/document_type.py
deleted file mode 100644
index 5fb3c99..0000000
--- a/src/superagent/types/document_type.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import enum
-import typing
-
-T_Result = typing.TypeVar("T_Result")
-
-
-class DocumentType(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
-    TXT = "TXT"
-    PDF = "PDF"
-    CSV = "CSV"
-    YOUTUBE = "YOUTUBE"
-    OPENAPI = "OPENAPI"
-    URL = "URL"
-    MARKDOWN = "MARKDOWN"
-    FIRESTORE = "FIRESTORE"
-    PSYCHIC = "PSYCHIC"
-    GITHUB_REPOSITORY = "GITHUB_REPOSITORY"
-    WEBPAGE = "WEBPAGE"
-    STRIPE = "STRIPE"
-    AIRTABLE = "AIRTABLE"
-    SITEMAP = "SITEMAP"
-    NOTION = "NOTION"
-
-    def visit(
-        self,
-        txt: typing.Callable[[], T_Result],
-        pdf: typing.Callable[[], T_Result],
-        csv: typing.Callable[[], T_Result],
-        youtube: typing.Callable[[], T_Result],
-        openapi: typing.Callable[[], T_Result],
-        url: typing.Callable[[], T_Result],
-        markdown: typing.Callable[[], T_Result],
-        firestore: typing.Callable[[], T_Result],
-        psychic: typing.Callable[[], T_Result],
-        github_repository: typing.Callable[[], T_Result],
-        webpage: typing.Callable[[], T_Result],
-        stripe: typing.Callable[[], T_Result],
-        airtable: typing.Callable[[], T_Result],
-        sitemap: typing.Callable[[], T_Result],
-        notion: typing.Callable[[], T_Result],
-    ) -> T_Result:
-        if self is DocumentType.TXT:
-            return txt()
-        if self is DocumentType.PDF:
-            return pdf()
-        if self is DocumentType.CSV:
-            return csv()
-        if self is DocumentType.YOUTUBE:
-            return youtube()
-        if self is DocumentType.OPENAPI:
-            return openapi()
-        if self is DocumentType.URL:
-            return url()
-        if self is DocumentType.MARKDOWN:
-            return markdown()
-        if self is DocumentType.FIRESTORE:
-            return firestore()
-        if self is DocumentType.PSYCHIC:
-            return psychic()
-        if self is DocumentType.GITHUB_REPOSITORY:
-            return github_repository()
-        if self is DocumentType.WEBPAGE:
-            return webpage()
-        if self is DocumentType.STRIPE:
-            return stripe()
-        if self is DocumentType.AIRTABLE:
-            return airtable()
-        if self is DocumentType.SITEMAP:
-            return sitemap()
-        if self is DocumentType.NOTION:
-            return notion()
diff --git a/src/superagent/types/llm_list.py b/src/superagent/types/llm_list.py
new file mode 100644
index 0000000..fd402d9
--- /dev/null
+++ b/src/superagent/types/llm_list.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class LlmList(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/prompt_list_output.py b/src/superagent/types/prompt_list_output.py
deleted file mode 100644
index 35af273..0000000
--- a/src/superagent/types/prompt_list_output.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_prompt import AppLibModelsResponsePrompt
-
-
-class PromptListOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.List[AppLibModelsResponsePrompt]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/prompt_output.py b/src/superagent/types/prompt_output.py
deleted file mode 100644
index 5ac1477..0000000
--- a/src/superagent/types/prompt_output.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_prompt import AppLibModelsResponsePrompt
-
-
-class PromptOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.Optional[AppLibModelsResponsePrompt]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_list.py b/src/superagent/types/tool_list.py
new file mode 100644
index 0000000..13ea3f0
--- /dev/null
+++ b/src/superagent/types/tool_list.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class ToolList(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_list_output.py b/src/superagent/types/tool_list_output.py
deleted file mode 100644
index c1a7c53..0000000
--- a/src/superagent/types/tool_list_output.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_tool import AppLibModelsResponseTool
-
-
-class ToolListOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.List[AppLibModelsResponseTool]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_output.py b/src/superagent/types/tool_output.py
deleted file mode 100644
index 42a3419..0000000
--- a/src/superagent/types/tool_output.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-from .app_lib_models_response_tool import AppLibModelsResponseTool
-
-
-class ToolOutput(pydantic.BaseModel):
-    success: bool
-    data: typing.Optional[AppLibModelsResponseTool]
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/tool_type.py b/src/superagent/types/tool_type.py
deleted file mode 100644
index 8c43042..0000000
--- a/src/superagent/types/tool_type.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import enum
-import typing
-
-T_Result = typing.TypeVar("T_Result")
-
-
-class ToolType(str, enum.Enum):
-    """
-    An enumeration.
-    """
-
-    BROWSER = "BROWSER"
-    SEARCH = "SEARCH"
-    WOLFRAM_ALPHA = "WOLFRAM_ALPHA"
-    REPLICATE = "REPLICATE"
-    ZAPIER_NLA = "ZAPIER_NLA"
-    AGENT = "AGENT"
-    OPENAPI = "OPENAPI"
-    CHATGPT_PLUGIN = "CHATGPT_PLUGIN"
-    METAPHOR = "METAPHOR"
-
-    def visit(
-        self,
-        browser: typing.Callable[[], T_Result],
-        search: typing.Callable[[], T_Result],
-        wolfram_alpha: typing.Callable[[], T_Result],
-        replicate: typing.Callable[[], T_Result],
-        zapier_nla: typing.Callable[[], T_Result],
-        agent: typing.Callable[[], T_Result],
-        openapi: typing.Callable[[], T_Result],
-        chatgpt_plugin: typing.Callable[[], T_Result],
-        metaphor: typing.Callable[[], T_Result],
-    ) -> T_Result:
-        if self is ToolType.BROWSER:
-            return browser()
-        if self is ToolType.SEARCH:
-            return search()
-        if self is ToolType.WOLFRAM_ALPHA:
-            return wolfram_alpha()
-        if self is ToolType.REPLICATE:
-            return replicate()
-        if self is ToolType.ZAPIER_NLA:
-            return zapier_nla()
-        if self is ToolType.AGENT:
-            return agent()
-        if self is ToolType.OPENAPI:
-            return openapi()
-        if self is ToolType.CHATGPT_PLUGIN:
-            return chatgpt_plugin()
-        if self is ToolType.METAPHOR:
-            return metaphor()
diff --git a/src/superagent/types/user.py b/src/superagent/types/user.py
deleted file mode 100644
index 310f39c..0000000
--- a/src/superagent/types/user.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# This file was auto-generated by Fern from our API Definition.
-
-import datetime as dt
-import typing
-
-import pydantic
-
-from ..core.datetime_utils import serialize_datetime
-
-
-class User(pydantic.BaseModel):
-    id: str
-    email: str
-    password: typing.Optional[str]
-    name: typing.Optional[str]
-    created_at: typing.Optional[dt.datetime] = pydantic.Field(alias="createdAt")
-    updated_at: typing.Optional[dt.datetime] = pydantic.Field(alias="updatedAt")
-    deleted_at: typing.Optional[dt.datetime] = pydantic.Field(alias="deletedAt")
-    provider: typing.Optional[str]
-    access_token: typing.Optional[str] = pydantic.Field(alias="accessToken")
-
-    def json(self, **kwargs: typing.Any) -> str:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().json(**kwargs_with_defaults)
-
-    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-        return super().dict(**kwargs_with_defaults)
-
-    class Config:
-        frozen = True
-        smart_union = True
-        allow_population_by_field_name = True
-        json_encoders = {dt.datetime: serialize_datetime}
diff --git a/src/superagent/types/workflow_list.py b/src/superagent/types/workflow_list.py
new file mode 100644
index 0000000..4ceba38
--- /dev/null
+++ b/src/superagent/types/workflow_list.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class WorkflowList(pydantic.BaseModel):
+    success: bool
+    data: typing.Optional[typing.List[typing.Dict[str, typing.Any]]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        json_encoders = {dt.datetime: serialize_datetime}